From e8025d4a5dd68b430b25e60cac823f570bdab3f4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 28 Dec 2020 09:40:48 -0800 Subject: [PATCH] feat(v1beta2): remove DOCKER/FLINK enums from Component, add fields to ClusterConfig, InstanceGroupConfig, WorkflowTemplate, WorkflowMetadata (#411) Breaking changes: 1. The `DOCKER` and `FLINK` values have been removed from the `Component` enum, and an `HBASE` value was added. Other changes: 1. There is a new `temp_bucket` field in `ClusterConfig`. 2. There is a new `preemptibility` field in `InstanceGroupConfig`. 3. The `project_id` field of `JobReference` is now optional instead of required. 4. There is a new `dag_timeout` field in `WorkflowTemplate`. 5. There are new `dag_timeout`, `dag_start_time`, and `dag_end_time` fields in `WorkflowMetadata`. 6. There are various updates to the doc comments. PiperOrigin-RevId: 347036369 Source-Link: https://github.com/googleapis/googleapis/commit/6d65640b1fcbdf26ea76cb720de0ac138cae9bed PiperOrigin-RevId: 347029491 Source-Link: https://github.com/googleapis/googleapis/commit/bddb3d7479894576e61a6c88fa745bb24f4c98f7 PiperOrigin-RevId: 346372957 Source-Link: https://github.com/googleapis/googleapis/commit/857f70daef9f4c38f042fb3dfb7b8423ae18fd19 PiperOrigin-RevId: 346132878 Source-Link: https://github.com/googleapis/googleapis/commit/565b9a5f01d7154825c657bca5ee244e86aad918 PiperOrigin-RevId: 345127100 Source-Link: https://github.com/googleapis/googleapis/commit/269083b6ed01aa5a1f445462fcc61e05471cd8f8 --- .../v1/AutoscalingPolicyServiceClient.java | 492 +++----- .../v1/AutoscalingPolicyServiceSettings.java | 36 +- .../dataproc/v1/ClusterControllerClient.java | 623 ++-------- .../v1/ClusterControllerSettings.java | 95 +- .../dataproc/v1/JobControllerClient.java | 448 +------ .../dataproc/v1/JobControllerSettings.java | 44 +- .../v1/WorkflowTemplateServiceClient.java | 1045 ++++++----------- .../v1/WorkflowTemplateServiceSettings.java | 94 +- .../cloud/dataproc/v1/package-info.java | 58 +- .../v1/stub/AutoscalingPolicyServiceStub.java | 9 +- .../AutoscalingPolicyServiceStubSettings.java | 336 ++---- .../v1/stub/ClusterControllerStub.java | 32 +- .../stub/ClusterControllerStubSettings.java | 327 +++--- ...toscalingPolicyServiceCallableFactory.java | 40 +- .../GrpcAutoscalingPolicyServiceStub.java | 39 +- .../GrpcClusterControllerCallableFactory.java | 40 +- .../v1/stub/GrpcClusterControllerStub.java | 191 ++- .../GrpcJobControllerCallableFactory.java | 40 +- .../v1/stub/GrpcJobControllerStub.java | 123 +- ...orkflowTemplateServiceCallableFactory.java | 40 +- .../stub/GrpcWorkflowTemplateServiceStub.java | 191 +-- .../dataproc/v1/stub/JobControllerStub.java | 11 +- .../v1/stub/JobControllerStubSettings.java | 310 ++--- .../v1/stub/WorkflowTemplateServiceStub.java | 30 +- .../WorkflowTemplateServiceStubSettings.java | 348 +++--- .../AutoscalingPolicyServiceClient.java | 492 +++----- .../AutoscalingPolicyServiceSettings.java | 36 +- .../v1beta2/ClusterControllerClient.java | 613 ++-------- .../v1beta2/ClusterControllerSettings.java | 93 +- .../dataproc/v1beta2/JobControllerClient.java | 448 +------ .../v1beta2/JobControllerSettings.java | 44 +- .../WorkflowTemplateServiceClient.java | 1045 ++++++----------- .../WorkflowTemplateServiceSettings.java | 94 +- .../cloud/dataproc/v1beta2/package-info.java | 58 +- .../stub/AutoscalingPolicyServiceStub.java | 9 +- .../AutoscalingPolicyServiceStubSettings.java | 338 ++---- .../v1beta2/stub/ClusterControllerStub.java | 32 +- 
.../stub/ClusterControllerStubSettings.java | 323 +++-- ...toscalingPolicyServiceCallableFactory.java | 40 +- .../GrpcAutoscalingPolicyServiceStub.java | 39 +- .../GrpcClusterControllerCallableFactory.java | 40 +- .../stub/GrpcClusterControllerStub.java | 189 ++- .../GrpcJobControllerCallableFactory.java | 40 +- .../v1beta2/stub/GrpcJobControllerStub.java | 123 +- ...orkflowTemplateServiceCallableFactory.java | 40 +- .../stub/GrpcWorkflowTemplateServiceStub.java | 191 +-- .../v1beta2/stub/JobControllerStub.java | 11 +- .../stub/JobControllerStubSettings.java | 308 ++--- .../stub/WorkflowTemplateServiceStub.java | 30 +- .../WorkflowTemplateServiceStubSettings.java | 342 +++--- .../AutoscalingPolicyServiceClientTest.java | 408 +++++-- .../v1/ClusterControllerClientTest.java | 288 +++-- .../v1/ClusterControllerSmokeTest.java | 65 - .../dataproc/v1/JobControllerClientTest.java | 324 +++-- .../v1/MockAutoscalingPolicyService.java | 6 +- .../v1/MockAutoscalingPolicyServiceImpl.java | 26 +- .../dataproc/v1/MockClusterController.java | 6 +- .../v1/MockClusterControllerImpl.java | 30 +- .../cloud/dataproc/v1/MockJobController.java | 6 +- .../dataproc/v1/MockJobControllerImpl.java | 34 +- .../v1/MockWorkflowTemplateService.java | 6 +- .../v1/MockWorkflowTemplateServiceImpl.java | 34 +- .../v1/WorkflowTemplateServiceClientTest.java | 718 ++++++++--- .../AutoscalingPolicyServiceClientTest.java | 408 +++++-- .../v1beta2/ClusterControllerClientTest.java | 282 ++--- .../v1beta2/ClusterControllerSmokeTest.java | 65 - .../v1beta2/JobControllerClientTest.java | 339 +++--- .../v1beta2/MockAutoscalingPolicyService.java | 6 +- .../MockAutoscalingPolicyServiceImpl.java | 26 +- .../v1beta2/MockClusterController.java | 6 +- .../v1beta2/MockClusterControllerImpl.java | 30 +- .../dataproc/v1beta2/MockJobController.java | 6 +- .../v1beta2/MockJobControllerImpl.java | 34 +- .../v1beta2/MockWorkflowTemplateService.java | 6 +- .../MockWorkflowTemplateServiceImpl.java | 34 +- .../WorkflowTemplateServiceClientTest.java | 725 +++++++++--- .../v1beta2/WorkflowTemplateServiceGrpc.java | 12 +- .../dataproc/v1/AutoscalingPolicyName.java | 153 +-- .../cloud/dataproc/v1/LocationName.java | 98 +- .../google/cloud/dataproc/v1/RegionName.java | 97 +- .../dataproc/v1/WorkflowTemplateName.java | 153 +-- .../clirr-ignored-differences.xml | 22 + .../v1beta2/AutoscalingPolicyName.java | 153 +-- .../v1beta2/BasicYarnAutoscalingConfig.java | 80 +- .../BasicYarnAutoscalingConfigOrBuilder.java | 20 +- .../cloud/dataproc/v1beta2/ClusterConfig.java | 317 ++++- .../v1beta2/ClusterConfigOrBuilder.java | 61 +- .../cloud/dataproc/v1beta2/ClusterName.java | 223 ++++ .../cloud/dataproc/v1beta2/ClustersProto.java | 413 +++---- .../cloud/dataproc/v1beta2/Component.java | 42 +- .../v1beta2/CreateClusterRequest.java | 63 +- .../CreateClusterRequestOrBuilder.java | 18 +- .../v1beta2/DeleteClusterRequest.java | 63 +- .../DeleteClusterRequestOrBuilder.java | 18 +- .../dataproc/v1beta2/GceClusterConfig.java | 14 +- .../v1beta2/GceClusterConfigOrBuilder.java | 4 +- .../dataproc/v1beta2/InstanceGroupConfig.java | 376 ++++++ .../v1beta2/InstanceGroupConfigOrBuilder.java | 37 + .../cloud/dataproc/v1beta2/JobReference.java | 42 +- .../v1beta2/JobReferenceOrBuilder.java | 12 +- .../cloud/dataproc/v1beta2/JobsProto.java | 2 +- .../dataproc/v1beta2/KerberosConfig.java | 16 +- .../v1beta2/KerberosConfigOrBuilder.java | 4 +- .../dataproc/v1beta2/LifecycleConfig.java | 36 +- .../v1beta2/LifecycleConfigOrBuilder.java | 9 +- 
.../cloud/dataproc/v1beta2/LocationName.java | 98 +- .../cloud/dataproc/v1beta2/OrderedJob.java | 1000 +++++++++++++--- .../dataproc/v1beta2/OrderedJobOrBuilder.java | 236 +++- .../cloud/dataproc/v1beta2/PySparkJob.java | 91 +- .../dataproc/v1beta2/PySparkJobOrBuilder.java | 28 +- .../cloud/dataproc/v1beta2/RegionName.java | 97 +- .../cloud/dataproc/v1beta2/SharedProto.java | 16 +- .../dataproc/v1beta2/SoftwareConfig.java | 14 +- .../v1beta2/SoftwareConfigOrBuilder.java | 4 +- .../cloud/dataproc/v1beta2/SparkJob.java | 104 +- .../dataproc/v1beta2/SparkJobOrBuilder.java | 32 +- .../cloud/dataproc/v1beta2/SparkRJob.java | 104 +- .../dataproc/v1beta2/SparkRJobOrBuilder.java | 32 +- .../dataproc/v1beta2/TemplateParameter.java | 104 +- .../v1beta2/TemplateParameterOrBuilder.java | 32 +- .../v1beta2/UpdateClusterRequest.java | 63 +- .../UpdateClusterRequestOrBuilder.java | 18 +- .../dataproc/v1beta2/WorkflowMetadata.java | 985 ++++++++++++++++ .../v1beta2/WorkflowMetadataOrBuilder.java | 144 +++ .../dataproc/v1beta2/WorkflowTemplate.java | 380 ++++++ .../v1beta2/WorkflowTemplateName.java | 153 +-- .../v1beta2/WorkflowTemplateOrBuilder.java | 62 + .../v1beta2/WorkflowTemplatesProto.java | 383 +++--- .../v1beta2/autoscaling_policies.proto | 64 +- .../cloud/dataproc/v1beta2/clusters.proto | 198 +++- .../google/cloud/dataproc/v1beta2/jobs.proto | 29 +- .../cloud/dataproc/v1beta2/shared.proto | 9 +- .../dataproc/v1beta2/workflow_templates.proto | 129 +- synth.metadata | 11 +- 134 files changed, 11518 insertions(+), 9638 deletions(-) delete mode 100644 google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerSmokeTest.java delete mode 100644 google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSmokeTest.java create mode 100644 proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterName.java diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClient.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClient.java index 42681501..d85ce9d3 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClient.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1; import com.google.api.core.ApiFunction; @@ -34,24 +35,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: The API interface for managing autoscaling policies in the Dataproc API. * *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
- *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
- *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
- * }
- * 
- * 
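A minimal sketch of the same quickstart against the regenerated client, assuming only the surface visible in this diff (the LocationName overload now comes first; RegionName.of("[PROJECT]", "[REGION]") still works):

    try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient =
        AutoscalingPolicyServiceClient.create()) {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
      AutoscalingPolicy response =
          autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
    }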
- * - *

Note: close() needs to be called on the autoscalingPolicyServiceClient object to clean up + *

Note: close() needs to be called on the AutoscalingPolicyServiceClient object to clean up * resources such as threads. In the example above, try-with-resources is used, which automatically * calls close(). * @@ -80,30 +71,28 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * AutoscalingPolicyServiceSettings autoscalingPolicyServiceSettings =
  *     AutoscalingPolicyServiceSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * AutoscalingPolicyServiceClient autoscalingPolicyServiceClient =
  *     AutoscalingPolicyServiceClient.create(autoscalingPolicyServiceSettings);
- * 
- * 
+ * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * AutoscalingPolicyServiceSettings autoscalingPolicyServiceSettings =
  *     AutoscalingPolicyServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
  * AutoscalingPolicyServiceClient autoscalingPolicyServiceClient =
  *     AutoscalingPolicyServiceClient.create(autoscalingPolicyServiceSettings);
- * 
- * 
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class AutoscalingPolicyServiceClient implements BackgroundResource { private final AutoscalingPolicyServiceSettings settings; private final AutoscalingPolicyServiceStub stub; @@ -125,7 +114,7 @@ public static final AutoscalingPolicyServiceClient create( /** * Constructs an instance of AutoscalingPolicyServiceClient, using the given stub for making - * calls. This is for advanced usage - prefer to use AutoscalingPolicyServiceSettings}. + * calls. This is for advanced usage - prefer using create(AutoscalingPolicyServiceSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final AutoscalingPolicyServiceClient create(AutoscalingPolicyServiceStub stub) { @@ -158,31 +147,26 @@ public AutoscalingPolicyServiceStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.create`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>For `projects.locations.autoscalingPolicies.create`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.create`, the resource name of the region
+   *           has the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.autoscalingPolicies.create`, the resource name of the
+   *           location has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * + * * @param policy Required. The autoscaling policy to create. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final AutoscalingPolicy createAutoscalingPolicy( - RegionName parent, AutoscalingPolicy policy) { + LocationName parent, AutoscalingPolicy policy) { CreateAutoscalingPolicyRequest request = CreateAutoscalingPolicyRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -191,31 +175,26 @@ public final AutoscalingPolicy createAutoscalingPolicy( return createAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.create`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>For `projects.locations.autoscalingPolicies.create`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.create`, the resource name of the region
+   *           has the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.autoscalingPolicies.create`, the resource name of the
+   *           location has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * + * * @param policy Required. The autoscaling policy to create. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final AutoscalingPolicy createAutoscalingPolicy( - LocationName parent, AutoscalingPolicy policy) { + RegionName parent, AutoscalingPolicy policy) { CreateAutoscalingPolicyRequest request = CreateAutoscalingPolicyRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -224,26 +203,21 @@ public final AutoscalingPolicy createAutoscalingPolicy( return createAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent.toString(), policy);
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.create`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>For `projects.locations.autoscalingPolicies.create`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.create`, the resource name of the region
+   *           has the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.autoscalingPolicies.create`, the resource name of the
+   *           location has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * + * * @param policy Required. The autoscaling policy to create. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -253,24 +227,10 @@ public final AutoscalingPolicy createAutoscalingPolicy(String parent, Autoscalin return createAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   CreateAutoscalingPolicyRequest request = CreateAutoscalingPolicyRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setPolicy(policy)
-   *     .build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -278,46 +238,23 @@ public final AutoscalingPolicy createAutoscalingPolicy(CreateAutoscalingPolicyRe return createAutoscalingPolicyCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   CreateAutoscalingPolicyRequest request = CreateAutoscalingPolicyRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setPolicy(policy)
-   *     .build();
-   *   ApiFuture<AutoscalingPolicy> future = autoscalingPolicyServiceClient.createAutoscalingPolicyCallable().futureCall(request);
-   *   // Do something
-   *   AutoscalingPolicy response = future.get();
-   * }
-   * 
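The callable variant keeps the same shape after regeneration; a sketch of the non-blocking pattern, using only the request and future types shown in this hunk:

    CreateAutoscalingPolicyRequest request =
        CreateAutoscalingPolicyRequest.newBuilder()
            .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
            .setPolicy(AutoscalingPolicy.newBuilder().build())
            .build();
    ApiFuture<AutoscalingPolicy> future =
        autoscalingPolicyServiceClient.createAutoscalingPolicyCallable().futureCall(request);
    // Do other work, then block only when the response is actually needed.
    AutoscalingPolicy response = future.get();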
*/ public final UnaryCallable createAutoscalingPolicyCallable() { return stub.createAutoscalingPolicyCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) autoscaling policy. * *

Disabled check for update_mask, because all updates will be full replacements. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.updateAutoscalingPolicy(policy);
-   * }
-   * 
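Because updates are full replacements rather than update_mask patches (see the note above), a safe pattern is to fetch the current policy and send back a modified copy; a sketch, with the resource name as a placeholder:

    AutoscalingPolicy existing =
        autoscalingPolicyServiceClient.getAutoscalingPolicy(
            "projects/[PROJECT]/locations/[LOCATION]/autoscalingPolicies/[POLICY]");
    // toBuilder() carries over every field, so nothing is silently cleared by the replacement.
    AutoscalingPolicy response =
        autoscalingPolicyServiceClient.updateAutoscalingPolicy(existing.toBuilder().build());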
- * * @param policy Required. The updated autoscaling policy. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -327,24 +264,12 @@ public final AutoscalingPolicy updateAutoscalingPolicy(AutoscalingPolicy policy) return updateAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) autoscaling policy. * *

Disabled check for update_mask, because all updates will be full replacements. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   UpdateAutoscalingPolicyRequest request = UpdateAutoscalingPolicyRequest.newBuilder()
-   *     .setPolicy(policy)
-   *     .build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.updateAutoscalingPolicy(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -352,52 +277,36 @@ public final AutoscalingPolicy updateAutoscalingPolicy(UpdateAutoscalingPolicyRe return updateAutoscalingPolicyCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) autoscaling policy. * *

Disabled check for update_mask, because all updates will be full replacements. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   UpdateAutoscalingPolicyRequest request = UpdateAutoscalingPolicyRequest.newBuilder()
-   *     .setPolicy(policy)
-   *     .build();
-   *   ApiFuture<AutoscalingPolicy> future = autoscalingPolicyServiceClient.updateAutoscalingPolicyCallable().futureCall(request);
-   *   // Do something
-   *   AutoscalingPolicy response = future.get();
-   * }
-   * 
*/ public final UnaryCallable updateAutoscalingPolicyCallable() { return stub.updateAutoscalingPolicyCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.getAutoscalingPolicy(name);
-   * }
-   * 
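Both overloads survive the regeneration: the typed AutoscalingPolicyName and the plain String form; a sketch:

    AutoscalingPolicyName name =
        AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName(
            "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
    AutoscalingPolicy byName = autoscalingPolicyServiceClient.getAutoscalingPolicy(name);
    // The String overload accepts the same fully qualified resource name.
    AutoscalingPolicy byString =
        autoscalingPolicyServiceClient.getAutoscalingPolicy(name.toString());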
- * * @param name Required. The "resource name" of the autoscaling policy, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.get`, the resource name of the policy
-   *     has the following format:
-   *     `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
-   *     <p>For `projects.locations.autoscalingPolicies.get`, the resource name of the policy
-   *     has the following format:
-   *     `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.get`, the resource name of the policy has
+   *           the following format:
+   *           `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
+   *       <li>For `projects.locations.autoscalingPolicies.get`, the resource name of the policy has
+   *           the following format:
+   *           `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final AutoscalingPolicy getAutoscalingPolicy(AutoscalingPolicyName name) { @@ -408,27 +317,23 @@ public final AutoscalingPolicy getAutoscalingPolicy(AutoscalingPolicyName name) return getAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.getAutoscalingPolicy(name.toString());
-   * }
-   * 
- * * @param name Required. The "resource name" of the autoscaling policy, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.get`, the resource name of the policy
-   *     has the following format:
-   *     `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
-   *     <p>For `projects.locations.autoscalingPolicies.get`, the resource name of the policy
-   *     has the following format:
-   *     `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.get`, the resource name of the policy has
+   *           the following format:
+   *           `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
+   *       <li>For `projects.locations.autoscalingPolicies.get`, the resource name of the policy has
+   *           the following format:
+   *           `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final AutoscalingPolicy getAutoscalingPolicy(String name) { @@ -437,22 +342,10 @@ public final AutoscalingPolicy getAutoscalingPolicy(String name) { return getAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   GetAutoscalingPolicyRequest request = GetAutoscalingPolicyRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.getAutoscalingPolicy(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -460,53 +353,35 @@ public final AutoscalingPolicy getAutoscalingPolicy(GetAutoscalingPolicyRequest return getAutoscalingPolicyCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves autoscaling policy. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   GetAutoscalingPolicyRequest request = GetAutoscalingPolicyRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<AutoscalingPolicy> future = autoscalingPolicyServiceClient.getAutoscalingPolicyCallable().futureCall(request);
-   *   // Do something
-   *   AutoscalingPolicy response = future.get();
-   * }
-   * 
*/ public final UnaryCallable getAutoscalingPolicyCallable() { return stub.getAutoscalingPolicyCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (AutoscalingPolicy element : autoscalingPolicyServiceClient.listAutoscalingPolicies(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.list`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>For `projects.locations.autoscalingPolicies.list`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.list`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.autoscalingPolicies.list`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(RegionName parent) { + public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(LocationName parent) { ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -514,30 +389,24 @@ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(Region return listAutoscalingPolicies(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   for (AutoscalingPolicy element : autoscalingPolicyServiceClient.listAutoscalingPolicies(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.list`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>For `projects.locations.autoscalingPolicies.list`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.list`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.autoscalingPolicies.list`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(LocationName parent) { + public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(RegionName parent) { ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -545,27 +414,21 @@ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(Locati return listAutoscalingPolicies(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (AutoscalingPolicy element : autoscalingPolicyServiceClient.listAutoscalingPolicies(parent.toString()).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.list`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>For `projects.locations.autoscalingPolicies.list`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.list`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.autoscalingPolicies.list`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(String parent) { @@ -574,24 +437,10 @@ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(String return listAutoscalingPolicies(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   for (AutoscalingPolicy element : autoscalingPolicyServiceClient.listAutoscalingPolicies(request).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -600,85 +449,46 @@ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies( return listAutoscalingPoliciesPagedCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   ApiFuture<ListAutoscalingPoliciesPagedResponse> future = autoscalingPolicyServiceClient.listAutoscalingPoliciesPagedCallable().futureCall(request);
-   *   // Do something
-   *   for (AutoscalingPolicy element : future.get().iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
*/ public final UnaryCallable listAutoscalingPoliciesPagedCallable() { return stub.listAutoscalingPoliciesPagedCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   while (true) {
-   *     ListAutoscalingPoliciesResponse response = autoscalingPolicyServiceClient.listAutoscalingPoliciesCallable().call(request);
-   *     for (AutoscalingPolicy element : response.getPoliciesList()) {
-   *       // doThingsWith(element);
-   *     }
-   *     String nextPageToken = response.getNextPageToken();
-   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
-   *       request = request.toBuilder().setPageToken(nextPageToken).build();
-   *     } else {
-   *       break;
-   *     }
-   *   }
-   * }
-   * 
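Driving listAutoscalingPoliciesCallable() by hand still follows the loop shown above; the same sketch without the Guava Strings helper (page tokens come back as empty strings, never null):

    ListAutoscalingPoliciesRequest request =
        ListAutoscalingPoliciesRequest.newBuilder()
            .setParent(LocationName.of("[PROJECT]", "[LOCATION]").toString())
            .build();
    while (true) {
      ListAutoscalingPoliciesResponse response =
          autoscalingPolicyServiceClient.listAutoscalingPoliciesCallable().call(request);
      for (AutoscalingPolicy element : response.getPoliciesList()) {
        // doThingsWith(element);
      }
      String nextPageToken = response.getNextPageToken();
      if (nextPageToken.isEmpty()) {
        break; // last page reached
      }
      request = request.toBuilder().setPageToken(nextPageToken).build();
    }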
*/ public final UnaryCallable listAutoscalingPoliciesCallable() { return stub.listAutoscalingPoliciesCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes an autoscaling policy. It is an error to delete an autoscaling policy that is in use by * one or more clusters. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   autoscalingPolicyServiceClient.deleteAutoscalingPolicy(name);
-   * }
-   * 
- * * @param name Required. The "resource name" of the autoscaling policy, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.delete`, the resource name of the policy
-   *     has the following format:
-   *     `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
-   *     <p>For `projects.locations.autoscalingPolicies.delete`, the resource name of the
-   *     policy has the following format:
-   *     `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.delete`, the resource name of the policy
+   *           has the following format:
+   *           `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
+   *       <li>For `projects.locations.autoscalingPolicies.delete`, the resource name of the policy
+   *           has the following format:
+   *           `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteAutoscalingPolicy(AutoscalingPolicyName name) { @@ -689,28 +499,24 @@ public final void deleteAutoscalingPolicy(AutoscalingPolicyName name) { deleteAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes an autoscaling policy. It is an error to delete an autoscaling policy that is in use by * one or more clusters. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   autoscalingPolicyServiceClient.deleteAutoscalingPolicy(name.toString());
-   * }
-   * 
- * * @param name Required. The "resource name" of the autoscaling policy, as described in * https://cloud.google.com/apis/design/resource_names. - *

<p>For `projects.regions.autoscalingPolicies.delete`, the resource name of the policy
-   *     has the following format:
-   *     `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
-   *     <p>For `projects.locations.autoscalingPolicies.delete`, the resource name of the
-   *     policy has the following format:
-   *     `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+   *     <ul>
+   *       <li>For `projects.regions.autoscalingPolicies.delete`, the resource name of the policy
+   *           has the following format:
+   *           `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
+   *       <li>For `projects.locations.autoscalingPolicies.delete`, the resource name of the policy
+   *           has the following format:
+   *           `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteAutoscalingPolicy(String name) { @@ -719,23 +525,11 @@ public final void deleteAutoscalingPolicy(String name) { deleteAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes an autoscaling policy. It is an error to delete an autoscaling policy that is in use by * one or more clusters. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   DeleteAutoscalingPolicyRequest request = DeleteAutoscalingPolicyRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   autoscalingPolicyServiceClient.deleteAutoscalingPolicy(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -743,24 +537,12 @@ public final void deleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest request deleteAutoscalingPolicyCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes an autoscaling policy. It is an error to delete an autoscaling policy that is in use by * one or more clusters. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   DeleteAutoscalingPolicyRequest request = DeleteAutoscalingPolicyRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<Void> future = autoscalingPolicyServiceClient.deleteAutoscalingPolicyCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
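For most callers the blocking form shown earlier is enough; deleting by typed name looks like this (and fails, per the method doc, if the policy is still in use by one or more clusters):

    AutoscalingPolicyName name =
        AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName(
            "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
    autoscalingPolicyServiceClient.deleteAutoscalingPolicy(name);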
*/ public final UnaryCallable deleteAutoscalingPolicyCallable() { diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceSettings.java index 36d59fbf..1ca5daa8 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1; import static com.google.cloud.dataproc.v1.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; @@ -34,7 +35,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link AutoscalingPolicyServiceClient}. * @@ -51,24 +52,26 @@ * *

For example, to set the total timeout of createAutoscalingPolicy to 30 seconds: * - *

- * 
+ * 
{@code
  * AutoscalingPolicyServiceSettings.Builder autoscalingPolicyServiceSettingsBuilder =
  *     AutoscalingPolicyServiceSettings.newBuilder();
  * autoscalingPolicyServiceSettingsBuilder
  *     .createAutoscalingPolicySettings()
  *     .setRetrySettings(
- *         autoscalingPolicyServiceSettingsBuilder.createAutoscalingPolicySettings().getRetrySettings().toBuilder()
+ *         autoscalingPolicyServiceSettingsBuilder
+ *             .createAutoscalingPolicySettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * AutoscalingPolicyServiceSettings autoscalingPolicyServiceSettings = autoscalingPolicyServiceSettingsBuilder.build();
- * 
- * 
+ * AutoscalingPolicyServiceSettings autoscalingPolicyServiceSettings = + * autoscalingPolicyServiceSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class AutoscalingPolicyServiceSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createAutoscalingPolicy. */ public UnaryCallSettings createAutoscalingPolicySettings() { @@ -168,18 +171,15 @@ protected AutoscalingPolicyServiceSettings(Builder settingsBuilder) throws IOExc /** Builder for AutoscalingPolicyServiceSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(AutoscalingPolicyServiceStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(AutoscalingPolicyServiceStubSettings.newBuilder()); - } - protected Builder(AutoscalingPolicyServiceSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -188,11 +188,15 @@ protected Builder(AutoscalingPolicyServiceStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(AutoscalingPolicyServiceStubSettings.newBuilder()); + } + public AutoscalingPolicyServiceStubSettings.Builder getStubSettingsBuilder() { return ((AutoscalingPolicyServiceStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerClient.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerClient.java index 371777be..ae80f94e 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerClient.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1; import com.google.api.core.ApiFunction; @@ -39,7 +40,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: The ClusterControllerService provides methods to manage clusters of Compute * Engine instances. @@ -47,18 +48,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
- *   String projectId = "";
- *   String region = "";
- *   String clusterName = "";
- *   Cluster response = clusterControllerClient.getCluster(projectId, region, clusterName);
- * }
- * 
- * 
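A sketch of the same cluster lookup, with placeholder identifiers:

    try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
      Cluster cluster =
          clusterControllerClient.getCluster("my-project", "us-central1", "my-cluster");
      System.out.println(cluster.getClusterName());
    }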
- * - *

Note: close() needs to be called on the clusterControllerClient object to clean up resources + *

Note: close() needs to be called on the ClusterControllerClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -87,30 +77,28 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * ClusterControllerSettings clusterControllerSettings =
  *     ClusterControllerSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * ClusterControllerClient clusterControllerClient =
  *     ClusterControllerClient.create(clusterControllerSettings);
- * 
- * 
+ * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * ClusterControllerSettings clusterControllerSettings =
  *     ClusterControllerSettings.newBuilder().setEndpoint(myEndpoint).build();
  * ClusterControllerClient clusterControllerClient =
  *     ClusterControllerClient.create(clusterControllerSettings);
- * 
- * 
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class ClusterControllerClient implements BackgroundResource { private final ClusterControllerSettings settings; private final ClusterControllerStub stub; @@ -132,7 +120,7 @@ public static final ClusterControllerClient create(ClusterControllerSettings set /** * Constructs an instance of ClusterControllerClient, using the given stub for making calls. This - * is for advanced usage - prefer to use ClusterControllerSettings}. + * is for advanced usage - prefer using create(ClusterControllerSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final ClusterControllerClient create(ClusterControllerStub stub) { @@ -170,37 +158,22 @@ public ClusterControllerStub getStub() { * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationsClient getOperationsClient() { return operationsClient; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   Cluster response = clusterControllerClient.createClusterAsync(projectId, region, cluster).get();
-   * }
-   * 
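The commit message above introduces temp_bucket on ClusterConfig and preemptibility on InstanceGroupConfig in v1beta2. A sketch of a create call that sets both; the setter names follow the standard proto-to-Java mapping for those fields, and the Preemptibility.PREEMPTIBLE constant is an assumption, not something shown in this diff:

    // v1beta2 sketch: new fields from this change, applied to the secondary
    // (preemptible) worker group for illustration.
    Cluster cluster =
        Cluster.newBuilder()
            .setClusterName("my-cluster") // placeholder
            .setConfig(
                ClusterConfig.newBuilder()
                    .setTempBucket("my-temp-bucket") // new ClusterConfig.temp_bucket field
                    .setSecondaryWorkerConfig(
                        InstanceGroupConfig.newBuilder()
                            // new InstanceGroupConfig.preemptibility field
                            .setPreemptibility(InstanceGroupConfig.Preemptibility.PREEMPTIBLE)
                            .build())
                    .build())
            .build();
    Cluster response =
        clusterControllerClient.createClusterAsync(projectId, region, cluster).get();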
- * * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs * to. * @param region Required. The Dataproc region in which to handle the request. * @param cluster Required. The cluster to create. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture createClusterAsync( String projectId, String region, Cluster cluster) { CreateClusterRequest request = @@ -212,115 +185,51 @@ public final OperationFuture createClusterAsy return createClusterAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   CreateClusterRequest request = CreateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setCluster(cluster)
-   *     .build();
-   *   Cluster response = clusterControllerClient.createClusterAsync(request).get();
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture createClusterAsync( CreateClusterRequest request) { return createClusterOperationCallable().futureCall(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). * *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   CreateClusterRequest request = CreateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setCluster(cluster)
-   *     .build();
-   *   OperationFuture<Cluster, ClusterOperationMetadata> future = clusterControllerClient.createClusterOperationCallable().futureCall(request);
-   *   // Do something
-   *   Cluster response = future.get();
-   * }
-   * 
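Beyond blocking for the final Cluster, the OperationFuture returned by the operation callable also exposes the operation's metadata; getMetadata() is the standard gax accessor for this:

    OperationFuture<Cluster, ClusterOperationMetadata> future =
        clusterControllerClient.createClusterOperationCallable().futureCall(request);
    // Metadata can resolve while the operation is still running.
    ClusterOperationMetadata metadata = future.getMetadata().get();
    Cluster response = future.get();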
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable createClusterOperationCallable() { return stub.createClusterOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). * *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   CreateClusterRequest request = CreateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setCluster(cluster)
-   *     .build();
-   *   ApiFuture<Operation> future = clusterControllerClient.createClusterCallable().futureCall(request);
-   *   // Do something
-   *   Operation response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createClusterCallable() { return stub.createClusterCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   Cluster response = clusterControllerClient.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get();
-   * }
-   * 
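A concrete update sketch: resizing the primary worker group to five instances by naming the config.worker_config.num_instances path in the mask, the path documented for this method:

    Cluster cluster =
        Cluster.newBuilder()
            .setConfig(
                ClusterConfig.newBuilder()
                    .setWorkerConfig(
                        InstanceGroupConfig.newBuilder().setNumInstances(5).build())
                    .build())
            .build();
    FieldMask updateMask =
        FieldMask.newBuilder().addPaths("config.worker_config.num_instances").build();
    Cluster response =
        clusterControllerClient
            .updateClusterAsync(projectId, region, clusterName, cluster, updateMask)
            .get();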
- * * @param projectId Required. The ID of the Google Cloud Platform project the cluster belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param clusterName Required. The cluster name. @@ -348,8 +257,6 @@ public final UnaryCallable createClusterCallabl * change autoscaling policies</td> </tr> </tbody> </table> * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture updateClusterAsync( String projectId, String region, String clusterName, Cluster cluster, FieldMask updateMask) { UpdateClusterRequest request = @@ -363,133 +270,57 @@ public final OperationFuture updateClusterAsy return updateClusterAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). * - *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateClusterRequest request = UpdateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .setCluster(cluster)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   Cluster response = clusterControllerClient.updateClusterAsync(request).get();
-   * }
-   * 
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Cluster, ClusterOperationMetadata> updateClusterAsync(
       UpdateClusterRequest request) {
     return updateClusterOperationCallable().futureCall(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Updates a cluster in a project. The returned
    * [Operation.metadata][google.longrunning.Operation.metadata] will be
    * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateClusterRequest request = UpdateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .setCluster(cluster)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   OperationFuture<Cluster, ClusterOperationMetadata> future = clusterControllerClient.updateClusterOperationCallable().futureCall(request);
-   *   // Do something
-   *   Cluster response = future.get();
-   * }
-   * 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable updateClusterOperationCallable() { return stub.updateClusterOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). * *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateClusterRequest request = UpdateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .setCluster(cluster)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   ApiFuture<Operation> future = clusterControllerClient.updateClusterCallable().futureCall(request);
-   *   // Do something
-   *   Operation response = future.get();
-   * }
-   * 
   */
   public final UnaryCallable<UpdateClusterRequest, Operation> updateClusterCallable() {
     return stub.updateClusterCallable();
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Deletes a cluster in a project. The returned
    * [Operation.metadata][google.longrunning.Operation.metadata] will be
    * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   clusterControllerClient.deleteClusterAsync(projectId, region, clusterName).get();
-   * }
-   * 
-   *
    * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs
    *     to.
    * @param region Required. The Dataproc region in which to handle the request.
    * @param clusterName Required. The cluster name.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, ClusterOperationMetadata> deleteClusterAsync(
       String projectId, String region, String clusterName) {
     DeleteClusterRequest request =
@@ -501,250 +332,49 @@ public final OperationFuture deleteClusterAsync
     return deleteClusterAsync(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Deletes a cluster in a project. The returned
    * [Operation.metadata][google.longrunning.Operation.metadata] will be
    * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DeleteClusterRequest request = DeleteClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   clusterControllerClient.deleteClusterAsync(request).get();
-   * }
-   * 
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, ClusterOperationMetadata> deleteClusterAsync(
       DeleteClusterRequest request) {
     return deleteClusterOperationCallable().futureCall(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Deletes a cluster in a project. The returned
    * [Operation.metadata][google.longrunning.Operation.metadata] will be
    * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DeleteClusterRequest request = DeleteClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   OperationFuture<Empty, ClusterOperationMetadata> future = clusterControllerClient.deleteClusterOperationCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable deleteClusterOperationCallable() { return stub.deleteClusterOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). * *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DeleteClusterRequest request = DeleteClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   ApiFuture<Operation> future = clusterControllerClient.deleteClusterCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
   */
   public final UnaryCallable<DeleteClusterRequest, Operation> deleteClusterCallable() {
     return stub.deleteClusterCallable();
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Gets cluster diagnostic information. The returned
-   * [Operation.metadata][google.longrunning.Operation.metadata] will be
-   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
-   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
-   * contains
-   * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
-   *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DiagnoseClusterResults response = clusterControllerClient.diagnoseClusterAsync(projectId, region, clusterName).get();
-   * }
-   * 
-   *
-   * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs
-   *     to.
-   * @param region Required. The Dataproc region in which to handle the request.
-   * @param clusterName Required. The cluster name.
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
-  public final OperationFuture<DiagnoseClusterResults, ClusterOperationMetadata>
-      diagnoseClusterAsync(String projectId, String region, String clusterName) {
-    DiagnoseClusterRequest request =
-        DiagnoseClusterRequest.newBuilder()
-            .setProjectId(projectId)
-            .setRegion(region)
-            .setClusterName(clusterName)
-            .build();
-    return diagnoseClusterAsync(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Gets cluster diagnostic information. The returned
-   * [Operation.metadata][google.longrunning.Operation.metadata] will be
-   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
-   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
-   * contains
-   * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
-   *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DiagnoseClusterRequest request = DiagnoseClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   DiagnoseClusterResults response = clusterControllerClient.diagnoseClusterAsync(request).get();
-   * }
-   * 
-   *
-   * @param request The request object containing all of the parameters for the API call.
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
-  public final OperationFuture<DiagnoseClusterResults, ClusterOperationMetadata>
-      diagnoseClusterAsync(DiagnoseClusterRequest request) {
-    return diagnoseClusterOperationCallable().futureCall(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Gets cluster diagnostic information. The returned
-   * [Operation.metadata][google.longrunning.Operation.metadata] will be
-   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
-   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
-   * contains
-   * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
-   *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DiagnoseClusterRequest request = DiagnoseClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   OperationFuture<DiagnoseClusterResults, ClusterOperationMetadata> future = clusterControllerClient.diagnoseClusterOperationCallable().futureCall(request);
-   *   // Do something
-   *   DiagnoseClusterResults response = future.get();
-   * }
-   * 
-   */
-  @BetaApi("The surface for use by generated code is not stable yet and may change in the future.")
-  public final OperationCallable<
-          DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata>
-      diagnoseClusterOperationCallable() {
-    return stub.diagnoseClusterOperationCallable();
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Gets cluster diagnostic information. The returned
-   * [Operation.metadata][google.longrunning.Operation.metadata] will be
-   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
-   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
-   * contains
-   * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
-   *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DiagnoseClusterRequest request = DiagnoseClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   ApiFuture<Operation> future = clusterControllerClient.diagnoseClusterCallable().futureCall(request);
-   *   // Do something
-   *   Operation response = future.get();
-   * }
-   * 
-   */
-  public final UnaryCallable<DiagnoseClusterRequest, Operation> diagnoseClusterCallable() {
-    return stub.diagnoseClusterCallable();
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Gets the resource representation for a cluster in a project.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster response = clusterControllerClient.getCluster(projectId, region, clusterName);
-   * }
-   * 
-   *
    * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs
    *     to.
    * @param region Required. The Dataproc region in which to handle the request.
    * @param clusterName Required. The cluster name.
@@ -761,26 +391,10 @@ public final Cluster getCluster(String projectId, String region, String clusterN
     return getCluster(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Gets the resource representation for a cluster in a project.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   GetClusterRequest request = GetClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   Cluster response = clusterControllerClient.getCluster(request);
-   * }
-   * 
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
@@ -788,48 +402,20 @@ public final Cluster getCluster(GetClusterRequest request) {
     return getClusterCallable().call(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Gets the resource representation for a cluster in a project.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   GetClusterRequest request = GetClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   ApiFuture<Cluster> future = clusterControllerClient.getClusterCallable().futureCall(request);
-   *   // Do something
-   *   Cluster response = future.get();
-   * }
-   * 
   */
   public final UnaryCallable<GetClusterRequest, Cluster> getClusterCallable() {
     return stub.getClusterCallable();
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Lists all regions/{region}/clusters in a project alphabetically.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   for (Cluster element : clusterControllerClient.listClusters(projectId, region).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
-   *
    * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs
    *     to.
    * @param region Required. The Dataproc region in which to handle the request.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
@@ -841,23 +427,10 @@ public final ListClustersPagedResponse listClusters(String projectId, String reg
     return listClusters(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Lists all regions/{region}/clusters in a project alphabetically.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String filter = "";
-   *   for (Cluster element : clusterControllerClient.listClusters(projectId, region, filter).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
-   *
    * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs
    *     to.
    * @param region Required. The Dataproc region in which to handle the request.
@@ -888,26 +461,10 @@ public final ListClustersPagedResponse listClusters(
     return listClusters(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Lists all regions/{region}/clusters in a project alphabetically.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListClustersRequest request = ListClustersRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   for (Cluster element : clusterControllerClient.listClusters(request).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
@@ -915,66 +472,102 @@ public final ListClustersPagedResponse listClusters(ListClustersRequest request)
     return listClustersPagedCallable().call(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Lists all regions/{region}/clusters in a project alphabetically.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListClustersRequest request = ListClustersRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   ApiFuture<ListClustersPagedResponse> future = clusterControllerClient.listClustersPagedCallable().futureCall(request);
-   *   // Do something
-   *   for (Cluster element : future.get().iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
   */
   public final UnaryCallable<ListClustersRequest, ListClustersPagedResponse>
       listClustersPagedCallable() {
     return stub.listClustersPagedCallable();
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Lists all regions/{region}/clusters in a project alphabetically.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListClustersRequest request = ListClustersRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   while (true) {
-   *     ListClustersResponse response = clusterControllerClient.listClustersCallable().call(request);
-   *     for (Cluster element : response.getClustersList()) {
-   *       // doThingsWith(element);
-   *     }
-   *     String nextPageToken = response.getNextPageToken();
-   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
-   *       request = request.toBuilder().setPageToken(nextPageToken).build();
-   *     } else {
-   *       break;
-   *     }
-   *   }
-   * }
-   * 
   */
   public final UnaryCallable<ListClustersRequest, ListClustersResponse> listClustersCallable() {
     return stub.listClustersCallable();
   }

+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Gets cluster diagnostic information. The returned
+   * [Operation.metadata][google.longrunning.Operation.metadata] will be
+   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
+   * contains
+   * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
+   *
+   * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs
+   *     to.
+   * @param region Required. The Dataproc region in which to handle the request.
+   * @param clusterName Required. The cluster name.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final OperationFuture<DiagnoseClusterResults, ClusterOperationMetadata>
+      diagnoseClusterAsync(String projectId, String region, String clusterName) {
+    DiagnoseClusterRequest request =
+        DiagnoseClusterRequest.newBuilder()
+            .setProjectId(projectId)
+            .setRegion(region)
+            .setClusterName(clusterName)
+            .build();
+    return diagnoseClusterAsync(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Gets cluster diagnostic information. The returned
+   * [Operation.metadata][google.longrunning.Operation.metadata] will be
+   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
+   * contains
+   * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
+   *
+   * @param request The request object containing all of the parameters for the API call.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final OperationFuture<DiagnoseClusterResults, ClusterOperationMetadata>
+      diagnoseClusterAsync(DiagnoseClusterRequest request) {
+    return diagnoseClusterOperationCallable().futureCall(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Gets cluster diagnostic information. The returned
+   * [Operation.metadata][google.longrunning.Operation.metadata] will be
+   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
+   * contains
+   * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
+   *

+   * <p>Sample code:
+   */
+  public final OperationCallable<
+          DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata>
+      diagnoseClusterOperationCallable() {
+    return stub.diagnoseClusterOperationCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Gets cluster diagnostic information. The returned
+   * [Operation.metadata][google.longrunning.Operation.metadata] will be
+   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
+   * contains
+   * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
+   *
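The relocated diagnose surface above ships without an inline sample. A minimal sketch of driving the long-running diagnose call end to end, with placeholder IDs and the request builder taken from the removed v1 samples earlier in this patch, could look like:

    try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
      DiagnoseClusterRequest request =
          DiagnoseClusterRequest.newBuilder()
              .setProjectId("my-project-id") // placeholder
              .setRegion("us-central1") // placeholder
              .setClusterName("my-cluster") // placeholder
              .build();
      // get() blocks until the diagnose operation finishes and returns its results.
      DiagnoseClusterResults results = clusterControllerClient.diagnoseClusterAsync(request).get();
      System.out.println(results.getOutputUri());
    }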

+   * <p>Sample code:
+   */
+  public final UnaryCallable<DiagnoseClusterRequest, Operation> diagnoseClusterCallable() {
+    return stub.diagnoseClusterCallable();
+  }
+
   @Override
   public final void close() {
     stub.close();
diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerSettings.java
index 421263de..b8b22b72 100644
--- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerSettings.java
+++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/ClusterControllerSettings.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * https://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import static com.google.cloud.dataproc.v1.ClusterControllerClient.ListClustersPagedResponse;
@@ -36,7 +37,7 @@
 import java.util.List;
 import javax.annotation.Generated;

-// AUTO-GENERATED DOCUMENTATION AND CLASS
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
 /**
  * Settings class to configure an instance of {@link ClusterControllerClient}.
  *
@@ -53,31 +54,30 @@
  *

 * <p>For example, to set the total timeout of getCluster to 30 seconds:
 *

- * <pre><code>
+ * <pre>{@code
  * ClusterControllerSettings.Builder clusterControllerSettingsBuilder =
  *     ClusterControllerSettings.newBuilder();
  * clusterControllerSettingsBuilder
  *     .getClusterSettings()
  *     .setRetrySettings(
- *         clusterControllerSettingsBuilder.getClusterSettings().getRetrySettings().toBuilder()
+ *         clusterControllerSettingsBuilder
+ *             .getClusterSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * ClusterControllerSettings clusterControllerSettings = clusterControllerSettingsBuilder.build();
- * </code>
- * </pre>
+ * }</pre>
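A builder only affects clients constructed from it; a short usage sketch, mirroring the credentials example for JobControllerClient further down in this patch:

    ClusterControllerClient clusterControllerClient =
        ClusterControllerClient.create(clusterControllerSettingsBuilder.build());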
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class ClusterControllerSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createCluster. */ public UnaryCallSettings createClusterSettings() { return ((ClusterControllerStubSettings) getStubSettings()).createClusterSettings(); } /** Returns the object with the settings used for calls to createCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings createClusterOperationSettings() { return ((ClusterControllerStubSettings) getStubSettings()).createClusterOperationSettings(); @@ -89,8 +89,6 @@ public UnaryCallSettings updateClusterSettings( } /** Returns the object with the settings used for calls to updateCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings updateClusterOperationSettings() { return ((ClusterControllerStubSettings) getStubSettings()).updateClusterOperationSettings(); @@ -102,38 +100,34 @@ public UnaryCallSettings deleteClusterSettings( } /** Returns the object with the settings used for calls to deleteCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings deleteClusterOperationSettings() { return ((ClusterControllerStubSettings) getStubSettings()).deleteClusterOperationSettings(); } + /** Returns the object with the settings used for calls to getCluster. */ + public UnaryCallSettings getClusterSettings() { + return ((ClusterControllerStubSettings) getStubSettings()).getClusterSettings(); + } + + /** Returns the object with the settings used for calls to listClusters. */ + public PagedCallSettings + listClustersSettings() { + return ((ClusterControllerStubSettings) getStubSettings()).listClustersSettings(); + } + /** Returns the object with the settings used for calls to diagnoseCluster. */ public UnaryCallSettings diagnoseClusterSettings() { return ((ClusterControllerStubSettings) getStubSettings()).diagnoseClusterSettings(); } /** Returns the object with the settings used for calls to diagnoseCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings< DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata> diagnoseClusterOperationSettings() { return ((ClusterControllerStubSettings) getStubSettings()).diagnoseClusterOperationSettings(); } - /** Returns the object with the settings used for calls to getCluster. */ - public UnaryCallSettings getClusterSettings() { - return ((ClusterControllerStubSettings) getStubSettings()).getClusterSettings(); - } - - /** Returns the object with the settings used for calls to listClusters. */ - public PagedCallSettings - listClustersSettings() { - return ((ClusterControllerStubSettings) getStubSettings()).listClustersSettings(); - } - public static final ClusterControllerSettings create(ClusterControllerStubSettings stub) throws IOException { return new ClusterControllerSettings.Builder(stub.toBuilder()).build(); @@ -194,18 +188,15 @@ protected ClusterControllerSettings(Builder settingsBuilder) throws IOException /** Builder for ClusterControllerSettings. 
   public static class Builder extends ClientSettings.Builder<ClusterControllerSettings, Builder> {
+
     protected Builder() throws IOException {
-      this((ClientContext) null);
+      this(((ClientContext) null));
     }

     protected Builder(ClientContext clientContext) {
       super(ClusterControllerStubSettings.newBuilder(clientContext));
     }

-    private static Builder createDefault() {
-      return new Builder(ClusterControllerStubSettings.newBuilder());
-    }
-
     protected Builder(ClusterControllerSettings settings) {
       super(settings.getStubSettings().toBuilder());
     }
@@ -214,11 +205,15 @@ protected Builder(ClusterControllerStubSettings.Builder stubSettings) {
       super(stubSettings);
     }

+    private static Builder createDefault() {
+      return new Builder(ClusterControllerStubSettings.newBuilder());
+    }
+
     public ClusterControllerStubSettings.Builder getStubSettingsBuilder() {
       return ((ClusterControllerStubSettings.Builder) getStubSettings());
     }

-    // NEXT_MAJOR_VER: remove 'throws Exception'
+    // NEXT_MAJOR_VER: remove 'throws Exception'.
     /**
      * Applies the given settings updater function to all of the unary API methods in this service.
      *
@@ -237,8 +232,6 @@ public UnaryCallSettings.Builder createClusterS
     }

     /** Returns the builder for the settings used for calls to createCluster. */
-    @BetaApi(
-        "The surface for long-running operations is not stable yet and may change in the future.")
     public OperationCallSettings.Builder<CreateClusterRequest, Cluster, ClusterOperationMetadata>
         createClusterOperationSettings() {
       return getStubSettingsBuilder().createClusterOperationSettings();
@@ -250,8 +243,6 @@ public UnaryCallSettings.Builder updateClusterS
     }

     /** Returns the builder for the settings used for calls to updateCluster. */
-    @BetaApi(
-        "The surface for long-running operations is not stable yet and may change in the future.")
     public OperationCallSettings.Builder<UpdateClusterRequest, Cluster, ClusterOperationMetadata>
         updateClusterOperationSettings() {
       return getStubSettingsBuilder().updateClusterOperationSettings();
@@ -263,27 +254,11 @@ public UnaryCallSettings.Builder deleteClusterS
     }

     /** Returns the builder for the settings used for calls to deleteCluster. */
-    @BetaApi(
-        "The surface for long-running operations is not stable yet and may change in the future.")
     public OperationCallSettings.Builder<DeleteClusterRequest, Empty, ClusterOperationMetadata>
         deleteClusterOperationSettings() {
       return getStubSettingsBuilder().deleteClusterOperationSettings();
     }

-    /** Returns the builder for the settings used for calls to diagnoseCluster. */
-    public UnaryCallSettings.Builder<DiagnoseClusterRequest, Operation> diagnoseClusterSettings() {
-      return getStubSettingsBuilder().diagnoseClusterSettings();
-    }
-
-    /** Returns the builder for the settings used for calls to diagnoseCluster. */
-    @BetaApi(
-        "The surface for long-running operations is not stable yet and may change in the future.")
-    public OperationCallSettings.Builder<
-            DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata>
-        diagnoseClusterOperationSettings() {
-      return getStubSettingsBuilder().diagnoseClusterOperationSettings();
-    }
-
     /** Returns the builder for the settings used for calls to getCluster. */
     public UnaryCallSettings.Builder<GetClusterRequest, Cluster> getClusterSettings() {
       return getStubSettingsBuilder().getClusterSettings();
@@ -296,6 +271,18 @@ public UnaryCallSettings.Builder getClusterSettings(
       return getStubSettingsBuilder().listClustersSettings();
     }

+    /** Returns the builder for the settings used for calls to diagnoseCluster. */
+    public UnaryCallSettings.Builder<DiagnoseClusterRequest, Operation> diagnoseClusterSettings() {
+      return getStubSettingsBuilder().diagnoseClusterSettings();
+    }
+
+    /** Returns the builder for the settings used for calls to diagnoseCluster. */
+    public OperationCallSettings.Builder<
+            DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata>
+        diagnoseClusterOperationSettings() {
+      return getStubSettingsBuilder().diagnoseClusterOperationSettings();
+    }
+
     @Override
     public ClusterControllerSettings build() throws IOException {
       return new ClusterControllerSettings(this);
diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerClient.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerClient.java
index 51856e4c..ca6eb688 100644
--- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerClient.java
+++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerClient.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * https://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.ApiFunction;
@@ -38,25 +39,14 @@
 import java.util.concurrent.TimeUnit;
 import javax.annotation.Generated;

-// AUTO-GENERATED DOCUMENTATION AND SERVICE
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
 /**
  * Service Description: The JobController provides methods to manage jobs.
  *

 * <p>This class provides the ability to make remote calls to the backing service through method
 * calls that map to API methods. Sample code to get started:
 *
- * <pre><code>
- * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
- *   String projectId = "";
- *   String region = "";
- *   Job job = Job.newBuilder().build();
- *   Job response = jobControllerClient.submitJob(projectId, region, job);
- * }
- * </code></pre>
- *
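A minimal quick-start sketch in place of the removed block above, with placeholder project and region values (note that an empty Job compiles, but a real submission also needs a placement and a job type, e.g. a HadoopJob or SparkJob):

    try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
      String projectId = "my-project-id"; // placeholder
      String region = "us-central1"; // placeholder
      Job job = Job.newBuilder().build();
      Job response = jobControllerClient.submitJob(projectId, region, job);
      System.out.println(response.getStatus());
    }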

- * <p>Note: close() needs to be called on the jobControllerClient object to clean up resources such

+ * <p>Note: close() needs to be called on the JobControllerClient object to clean up resources such
 * as threads. In the example above, try-with-resources is used, which automatically calls close().
 *

 * <p>The surface of this class includes several types of Java methods for each of the API's
@@ -84,30 +74,26 @@
 *
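The method types elided by the hunk above are the generator's usual three flavors; sketched here against submitJob, with the same placeholder values as in the quick-start sketch earlier:

    try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
      String projectId = "my-project-id"; // placeholder
      String region = "us-central1"; // placeholder
      Job job = Job.newBuilder().build();

      // 1. A "flattened" method: pass the request fields directly.
      Job response = jobControllerClient.submitJob(projectId, region, job);

      // 2. A "request object" method: pass a single request proto.
      SubmitJobRequest request =
          SubmitJobRequest.newBuilder()
              .setProjectId(projectId)
              .setRegion(region)
              .setJob(job)
              .build();
      response = jobControllerClient.submitJob(request);

      // 3. A "callable" method: obtain a future for asynchronous use.
      ApiFuture<Job> future = jobControllerClient.submitJobCallable().futureCall(request);
      response = future.get();
    }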

 * <p>To customize credentials:
 *

- * <pre><code>
+ * <pre>{@code
  * JobControllerSettings jobControllerSettings =
  *     JobControllerSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
- * JobControllerClient jobControllerClient =
- *     JobControllerClient.create(jobControllerSettings);
- * </code></pre>
+ * JobControllerClient jobControllerClient = JobControllerClient.create(jobControllerSettings);
+ * }</pre>
 *
- * To customize the endpoint:
+ * <p>To customize the endpoint:
 *
- * <pre><code>
+ * <pre>{@code
  * JobControllerSettings jobControllerSettings =
  *     JobControllerSettings.newBuilder().setEndpoint(myEndpoint).build();
- * JobControllerClient jobControllerClient =
- *     JobControllerClient.create(jobControllerSettings);
- * </code></pre>
+ * JobControllerClient jobControllerClient = JobControllerClient.create(jobControllerSettings);
+ * }</pre>
+ *

 * <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
 */
-@Generated("by gapic-generator")
 @BetaApi
+@Generated("by gapic-generator")
 public class JobControllerClient implements BackgroundResource {
   private final JobControllerSettings settings;
   private final JobControllerStub stub;
@@ -129,7 +115,7 @@ public static final JobControllerClient create(JobControllerSettings settings)
   /**
    * Constructs an instance of JobControllerClient, using the given stub for making calls. This is
-   * for advanced usage - prefer to use JobControllerSettings}.
+   * for advanced usage - prefer using create(JobControllerSettings).
    */
   @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
   public static final JobControllerClient create(JobControllerStub stub) {
@@ -167,27 +153,14 @@ public JobControllerStub getStub() {
    * Returns the OperationsClient that can be used to query the status of a long-running operation
    * returned by another API method call.
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationsClient getOperationsClient() {
     return operationsClient;
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Submits a job to a cluster.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   Job response = jobControllerClient.submitJob(projectId, region, job);
-   * }
-   * 
-   *
    * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to.
    * @param region Required. The Dataproc region in which to handle the request.
    * @param job Required. The job resource.
@@ -199,26 +172,10 @@ public final Job submitJob(String projectId, String region, Job job) {
     return submitJob(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Submits a job to a cluster.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   Job response = jobControllerClient.submitJob(request);
-   * }
-   * 
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
@@ -226,54 +183,25 @@ public final Job submitJob(SubmitJobRequest request) {
     return submitJobCallable().call(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Submits a job to a cluster.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   ApiFuture<Job> future = jobControllerClient.submitJobCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * 
   */
   public final UnaryCallable<SubmitJobRequest, Job> submitJobCallable() {
     return stub.submitJobCallable();
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Submits job to a cluster.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   Job response = jobControllerClient.submitJobAsOperationAsync(projectId, region, job).get();
-   * }
-   * 
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param job Required. The job resource. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture submitJobAsOperationAsync( String projectId, String region, Job job) { SubmitJobRequest request = @@ -281,105 +209,43 @@ public final OperationFuture submitJobAsOperationAsync( return submitJobAsOperationAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits job to a cluster. * - *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   Job response = jobControllerClient.submitJobAsOperationAsync(request).get();
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture submitJobAsOperationAsync( SubmitJobRequest request) { return submitJobAsOperationOperationCallable().futureCall(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits job to a cluster. * *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   OperationFuture<Job, JobMetadata> future = jobControllerClient.submitJobAsOperationOperationCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable submitJobAsOperationOperationCallable() { return stub.submitJobAsOperationOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits job to a cluster. * *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   ApiFuture<Operation> future = jobControllerClient.submitJobAsOperationCallable().futureCall(request);
-   *   // Do something
-   *   Operation response = future.get();
-   * }
-   * 
*/ public final UnaryCallable submitJobAsOperationCallable() { return stub.submitJobAsOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a job in a project. * - *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   Job response = jobControllerClient.getJob(projectId, region, jobId);
-   * }
-   * 
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param jobId Required. The job ID. @@ -395,26 +261,10 @@ public final Job getJob(String projectId, String region, String jobId) { return getJob(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a job in a project. * - *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   GetJobRequest request = GetJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   Job response = jobControllerClient.getJob(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -422,48 +272,20 @@ public final Job getJob(GetJobRequest request) { return getJobCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a job in a project. * *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   GetJobRequest request = GetJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   ApiFuture<Job> future = jobControllerClient.getJobCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * 
*/ public final UnaryCallable getJobCallable() { return stub.getJobCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists regions/{region}/jobs in a project. * - *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   for (Job element : jobControllerClient.listJobs(projectId, region).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -474,23 +296,10 @@ public final ListJobsPagedResponse listJobs(String projectId, String region) { return listJobs(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists regions/{region}/jobs in a project. * - *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String filter = "";
-   *   for (Job element : jobControllerClient.listJobs(projectId, region, filter).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
-   *
    * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to.
    * @param region Required. The Dataproc region in which to handle the request.
    * @param filter Optional. A filter constraining the jobs to list. Filters are case-sensitive and
@@ -514,26 +323,10 @@ public final ListJobsPagedResponse listJobs(String projectId, String region, Str
     return listJobs(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Lists regions/{region}/jobs in a project.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListJobsRequest request = ListJobsRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   for (Job element : jobControllerClient.listJobs(request).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
@@ -541,89 +334,30 @@ public final ListJobsPagedResponse listJobs(ListJobsRequest request) {
     return listJobsPagedCallable().call(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Lists regions/{region}/jobs in a project.
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListJobsRequest request = ListJobsRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   ApiFuture<ListJobsPagedResponse> future = jobControllerClient.listJobsPagedCallable().futureCall(request);
-   *   // Do something
-   *   for (Job element : future.get().iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
*/ public final UnaryCallable listJobsPagedCallable() { return stub.listJobsPagedCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists regions/{region}/jobs in a project. * *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListJobsRequest request = ListJobsRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   while (true) {
-   *     ListJobsResponse response = jobControllerClient.listJobsCallable().call(request);
-   *     for (Job element : response.getJobsList()) {
-   *       // doThingsWith(element);
-   *     }
-   *     String nextPageToken = response.getNextPageToken();
-   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
-   *       request = request.toBuilder().setPageToken(nextPageToken).build();
-   *     } else {
-   *       break;
-   *     }
-   *   }
-   * }
-   * 
*/ public final UnaryCallable listJobsCallable() { return stub.listJobsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a job in a project. * - *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   Job job = Job.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateJobRequest request = UpdateJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .setJob(job)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   Job response = jobControllerClient.updateJob(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -631,54 +365,23 @@ public final Job updateJob(UpdateJobRequest request) { return updateJobCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a job in a project. * *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   Job job = Job.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateJobRequest request = UpdateJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .setJob(job)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   ApiFuture<Job> future = jobControllerClient.updateJobCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * 
*/ public final UnaryCallable updateJobCallable() { return stub.updateJobCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Starts a job cancellation request. To access the job resource after cancellation, call * [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) * or * [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). * - *

Sample code: - * - *


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   Job response = jobControllerClient.cancelJob(projectId, region, jobId);
-   * }
-   * 
-   *
    * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to.
    * @param region Required. The Dataproc region in which to handle the request.
    * @param jobId Required. The job ID.
@@ -694,29 +397,13 @@ public final Job cancelJob(String projectId, String region, String jobId) {
     return cancelJob(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Starts a job cancellation request. To access the job resource after cancellation, call
    * [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
    * or
    * [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   CancelJobRequest request = CancelJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   Job response = jobControllerClient.cancelJob(request);
-   * }
-   * 
-   * </code></pre>
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
@@ -724,7 +411,7 @@ public final Job cancelJob(CancelJobRequest request) {
     return cancelJobCallable().call(request);
   }

-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Starts a job cancellation request. To access the job resource after cancellation, call
    * [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
@@ -732,43 +419,16 @@ public final Job cancelJob(CancelJobRequest request)
    * or
    * [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
    *

-   * <p>Sample code:
-   *
-   * <pre><code>


-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   CancelJobRequest request = CancelJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   ApiFuture<Job> future = jobControllerClient.cancelJobCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * </code></pre>
    */
   public final UnaryCallable<CancelJobRequest, Job> cancelJobCallable() {
     return stub.cancelJobCallable();
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Deletes the job from the project. If the job is active, the delete fails, and the response
    * returns `FAILED_PRECONDITION`.
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   jobControllerClient.deleteJob(projectId, region, jobId);
-   * }
-   * </code></pre>
-   *
    * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to.
    * @param region Required. The Dataproc region in which to handle the request.
    * @param jobId Required. The job ID.
@@ -784,27 +444,11 @@ public final void deleteJob(String projectId, String region, String jobId) {
     deleteJob(request);
   }
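A sketch of the `FAILED_PRECONDITION` behavior described above; `FailedPreconditionException` is GAX's mapping of that status, and the IDs are placeholders:

try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
  try {
    jobControllerClient.deleteJob("my-project", "us-central1", "my-job-id");
  } catch (com.google.api.gax.rpc.FailedPreconditionException e) {
    // The job is still active: request cancellation first, then retry the delete.
    jobControllerClient.cancelJob("my-project", "us-central1", "my-job-id");
  }
}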
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Deletes the job from the project. If the job is active, the delete fails, and the response
    * returns `FAILED_PRECONDITION`.
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   DeleteJobRequest request = DeleteJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   jobControllerClient.deleteJob(request);
-   * }
-   * </code></pre>
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
@@ -812,28 +456,12 @@ public final void deleteJob(DeleteJobRequest request) {
     deleteJobCallable().call(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Deletes the job from the project. If the job is active, the delete fails, and the response
    * returns `FAILED_PRECONDITION`.
    *
    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   DeleteJobRequest request = DeleteJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   ApiFuture<Void> future = jobControllerClient.deleteJobCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
    */
   public final UnaryCallable<DeleteJobRequest, Empty> deleteJobCallable() {
     return stub.deleteJobCallable();
   }
diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerSettings.java
index 7f3f2964..546f5cf7 100644
--- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerSettings.java
+++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/JobControllerSettings.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;
 
 import static com.google.cloud.dataproc.v1.JobControllerClient.ListJobsPagedResponse;
@@ -36,7 +37,7 @@
 import java.util.List;
 import javax.annotation.Generated;
 
-// AUTO-GENERATED DOCUMENTATION AND CLASS
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
 /**
  * Settings class to configure an instance of {@link JobControllerClient}.
  *
@@ -53,36 +54,34 @@
  *
  * <p>For example, to set the total timeout of submitJob to 30 seconds:
  *
- * <pre>
- * <code>
- * JobControllerSettings.Builder jobControllerSettingsBuilder =
- *     JobControllerSettings.newBuilder();
+ * <pre>{@code
+ * JobControllerSettings.Builder jobControllerSettingsBuilder = JobControllerSettings.newBuilder();
  * jobControllerSettingsBuilder
  *     .submitJobSettings()
  *     .setRetrySettings(
- *         jobControllerSettingsBuilder.submitJobSettings().getRetrySettings().toBuilder()
+ *         jobControllerSettingsBuilder
+ *             .submitJobSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * JobControllerSettings jobControllerSettings = jobControllerSettingsBuilder.build();
- * </code>
- * </pre>
+ * }</pre>
  */
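A sketch of how the builder from the sample above is typically applied; the 30-second timeout mirrors the javadoc, everything else is illustrative:

JobControllerSettings.Builder builder = JobControllerSettings.newBuilder();
builder
    .submitJobSettings()
    .setRetrySettings(
        builder.submitJobSettings().getRetrySettings().toBuilder()
            .setTotalTimeout(Duration.ofSeconds(30)) // org.threeten.bp.Duration
            .build());
// Settings take effect when the client is constructed from them.
try (JobControllerClient jobControllerClient = JobControllerClient.create(builder.build())) {
  // submitJob calls made through this client now give up after 30 seconds overall.
}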
-@Generated("by gapic-generator")
-@BetaApi
+@Generated("by gapic-generator-java")
 public class JobControllerSettings extends ClientSettings<JobControllerSettings> {
+
   /** Returns the object with the settings used for calls to submitJob. */
   public UnaryCallSettings<SubmitJobRequest, Job> submitJobSettings() {
     return ((JobControllerStubSettings) getStubSettings()).submitJobSettings();
   }
 
-  /** Returns the object with the settings used for calls to submitJobAsOperation. */
+  /** Returns the object with the settings used for calls to submitJobAs. */
   public UnaryCallSettings<SubmitJobRequest, Operation> submitJobAsOperationSettings() {
     return ((JobControllerStubSettings) getStubSettings()).submitJobAsOperationSettings();
   }
 
   /** Returns the object with the settings used for calls to submitJobAsOperation. */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public OperationCallSettings<SubmitJobRequest, Job, JobMetadata>
       submitJobAsOperationOperationSettings() {
     return ((JobControllerStubSettings) getStubSettings()).submitJobAsOperationOperationSettings();
@@ -174,18 +173,15 @@ protected JobControllerSettings(Builder settingsBuilder) throws IOException {
 
   /** Builder for JobControllerSettings. */
   public static class Builder extends ClientSettings.Builder<JobControllerSettings, Builder> {
+
     protected Builder() throws IOException {
-      this((ClientContext) null);
+      this(((ClientContext) null));
     }
 
     protected Builder(ClientContext clientContext) {
       super(JobControllerStubSettings.newBuilder(clientContext));
     }
 
-    private static Builder createDefault() {
-      return new Builder(JobControllerStubSettings.newBuilder());
-    }
-
     protected Builder(JobControllerSettings settings) {
       super(settings.getStubSettings().toBuilder());
     }
@@ -194,11 +190,15 @@ protected Builder(JobControllerStubSettings.Builder stubSettings) {
       super(stubSettings);
     }
 
+    private static Builder createDefault() {
+      return new Builder(JobControllerStubSettings.newBuilder());
+    }
+
     public JobControllerStubSettings.Builder getStubSettingsBuilder() {
       return ((JobControllerStubSettings.Builder) getStubSettings());
     }
 
-    // NEXT_MAJOR_VER: remove 'throws Exception'
+    // NEXT_MAJOR_VER: remove 'throws Exception'.
     /**
      * Applies the given settings updater function to all of the unary API methods in this service.
      *
@@ -216,14 +216,12 @@ public UnaryCallSettings.Builder<SubmitJobRequest, Job> submitJobSettings() {
       return getStubSettingsBuilder().submitJobSettings();
     }
 
-    /** Returns the builder for the settings used for calls to submitJobAsOperation. */
+    /** Returns the builder for the settings used for calls to submitJobAs. */
     public UnaryCallSettings.Builder<SubmitJobRequest, Operation> submitJobAsOperationSettings() {
       return getStubSettingsBuilder().submitJobAsOperationSettings();
     }
 
     /** Returns the builder for the settings used for calls to submitJobAsOperation. */
-    @BetaApi(
-        "The surface for long-running operations is not stable yet and may change in the future.")
     public OperationCallSettings.Builder<SubmitJobRequest, Job, JobMetadata>
         submitJobAsOperationOperationSettings() {
       return getStubSettingsBuilder().submitJobAsOperationOperationSettings();
diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClient.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClient.java
index 87932168..33a0e328 100644
--- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClient.java
+++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClient.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;
 
 import com.google.api.core.ApiFunction;
@@ -39,24 +40,14 @@
 import java.util.concurrent.TimeUnit;
 import javax.annotation.Generated;
 
-// AUTO-GENERATED DOCUMENTATION AND SERVICE
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
 /**
  * Service Description: The API interface for managing Workflow Templates in the Dataproc API.
  *
  * <p>This class provides the ability to make remote calls to the backing service through method
  * calls that map to API methods. Sample code to get started:
  *
- * <pre>
- * <code>
- * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
- *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
- *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
- * }
- * </code>
- * </pre>
- *
- * <p>Note: close() needs to be called on the workflowTemplateServiceClient object to clean up
+ * <p>Note: close() needs to be called on the WorkflowTemplateServiceClient object to clean up
  * resources such as threads. In the example above, try-with-resources is used, which automatically
  * calls close().
  *
@@ -85,30 +76,28 @@
  *
  * <p>To customize credentials:
  *
- * <pre>
- * <code>
+ * <pre>{@code
  * WorkflowTemplateServiceSettings workflowTemplateServiceSettings =
  *     WorkflowTemplateServiceSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * WorkflowTemplateServiceClient workflowTemplateServiceClient =
  *     WorkflowTemplateServiceClient.create(workflowTemplateServiceSettings);
- * </code>
- * </pre>
+ * }</pre>
  *
- * To customize the endpoint:
+ * <p>To customize the endpoint:
  *
- * <pre>
- * <code>
+ * <pre>{@code
  * WorkflowTemplateServiceSettings workflowTemplateServiceSettings =
  *     WorkflowTemplateServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
  * WorkflowTemplateServiceClient workflowTemplateServiceClient =
  *     WorkflowTemplateServiceClient.create(workflowTemplateServiceSettings);
- * </code>
- * </pre>
+ * }</pre>
+ *
+ * <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
  */
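A sketch of the close() contract called out in the javadoc, for code that cannot use try-with-resources:

WorkflowTemplateServiceClient workflowTemplateServiceClient =
    WorkflowTemplateServiceClient.create();
try {
  // ... issue RPCs ...
} finally {
  // Releases channels and executor threads; skipping this can keep the JVM alive.
  workflowTemplateServiceClient.close();
}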
-@Generated("by gapic-generator")
 @BetaApi
+@Generated("by gapic-generator")
 public class WorkflowTemplateServiceClient implements BackgroundResource {
   private final WorkflowTemplateServiceSettings settings;
   private final WorkflowTemplateServiceStub stub;
@@ -130,7 +119,7 @@ public static final WorkflowTemplateServiceClient create(WorkflowTemplateService
   /**
    * Constructs an instance of WorkflowTemplateServiceClient, using the given stub for making calls.
-   * This is for advanced usage - prefer to use WorkflowTemplateServiceSettings}.
+   * This is for advanced usage - prefer using create(WorkflowTemplateServiceSettings).
    */
   @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
   public static final WorkflowTemplateServiceClient create(WorkflowTemplateServiceStub stub) {
@@ -169,13 +158,195 @@ public WorkflowTemplateServiceStub getStub() {
    * Returns the OperationsClient that can be used to query the status of a long-running operation
    * returned by another API method call.
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationsClient getOperationsClient() {
     return operationsClient;
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates new workflow template.
+   *
+   * @param parent Required. The resource name of the region or location, as described in
+   *     https://cloud.google.com/apis/design/resource_names.
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates,create`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.create`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+   *
+   * @param template Required. The Dataproc workflow template to create.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final WorkflowTemplate createWorkflowTemplate(
+      LocationName parent, WorkflowTemplate template) {
+    CreateWorkflowTemplateRequest request =
+        CreateWorkflowTemplateRequest.newBuilder()
+            .setParent(parent == null ? null : parent.toString())
+            .setTemplate(template)
+            .build();
+    return createWorkflowTemplate(request);
+  }
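A sketch of the new LocationName overload added in this hunk; project, location, and template IDs are placeholders:

try (WorkflowTemplateServiceClient workflowTemplateServiceClient =
    WorkflowTemplateServiceClient.create()) {
  LocationName parent = LocationName.of("my-project", "us-central1");
  WorkflowTemplate template = WorkflowTemplate.newBuilder().setId("my-template").build();
  WorkflowTemplate created =
      workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
  System.out.println(created.getName()); // server-assigned resource name
}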
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates new workflow template.
+   *
+   * @param parent Required. The resource name of the region or location, as described in
+   *     https://cloud.google.com/apis/design/resource_names.
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates,create`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.create`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+   *
+   * @param template Required. The Dataproc workflow template to create.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final WorkflowTemplate createWorkflowTemplate(
+      RegionName parent, WorkflowTemplate template) {
+    CreateWorkflowTemplateRequest request =
+        CreateWorkflowTemplateRequest.newBuilder()
+            .setParent(parent == null ? null : parent.toString())
+            .setTemplate(template)
+            .build();
+    return createWorkflowTemplate(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates new workflow template.
+   *
+   * @param parent Required. The resource name of the region or location, as described in
+   *     https://cloud.google.com/apis/design/resource_names.
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates,create`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.create`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+   *
+   * @param template Required. The Dataproc workflow template to create.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final WorkflowTemplate createWorkflowTemplate(String parent, WorkflowTemplate template) {
+    CreateWorkflowTemplateRequest request =
+        CreateWorkflowTemplateRequest.newBuilder().setParent(parent).setTemplate(template).build();
+    return createWorkflowTemplate(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates new workflow template.
+   *
+   * @param request The request object containing all of the parameters for the API call.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final WorkflowTemplate createWorkflowTemplate(CreateWorkflowTemplateRequest request) {
+    return createWorkflowTemplateCallable().call(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Creates new workflow template.
+   *
+   * <p>Sample code:
+   */
+  public final UnaryCallable<CreateWorkflowTemplateRequest, WorkflowTemplate>
+      createWorkflowTemplateCallable() {
+    return stub.createWorkflowTemplateCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Retrieves the latest workflow template.
+   *
+   * <p>Can retrieve previously instantiated template by specifying optional version parameter.
+   *
+   * @param name Required. The resource name of the workflow template, as described in
+   *     https://cloud.google.com/apis/design/resource_names.
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.get`, the resource name of the template has
+   *           the following format:
+   *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   *       <li>For `projects.locations.workflowTemplates.get`, the resource name of the template has
+   *           the following format:
+   *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     </ul>
+   *
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final WorkflowTemplate getWorkflowTemplate(WorkflowTemplateName name) {
+    GetWorkflowTemplateRequest request =
+        GetWorkflowTemplateRequest.newBuilder()
+            .setName(name == null ? null : name.toString())
+            .build();
+    return getWorkflowTemplate(request);
+  }
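A sketch of fetching the latest template; to pin an older version, set the optional version field on GetWorkflowTemplateRequest instead:

try (WorkflowTemplateServiceClient workflowTemplateServiceClient =
    WorkflowTemplateServiceClient.create()) {
  WorkflowTemplateName name =
      WorkflowTemplateName.of("my-project", "us-central1", "my-template");
  WorkflowTemplate latest = workflowTemplateServiceClient.getWorkflowTemplate(name);
  System.out.println(latest.getVersion());
}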
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Retrieves the latest workflow template.
+   *
+   * <p>Can retrieve previously instantiated template by specifying optional version parameter.
+   *
+   * @param name Required. The resource name of the workflow template, as described in
+   *     https://cloud.google.com/apis/design/resource_names.
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.get`, the resource name of the template has
+   *           the following format:
+   *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   *       <li>For `projects.locations.workflowTemplates.get`, the resource name of the template has
+   *           the following format:
+   *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     </ul>
+   *
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final WorkflowTemplate getWorkflowTemplate(String name) {
+    GetWorkflowTemplateRequest request =
+        GetWorkflowTemplateRequest.newBuilder().setName(name).build();
+    return getWorkflowTemplate(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Retrieves the latest workflow template.
+   *
+   * <p>Can retrieve previously instantiated template by specifying optional version parameter.
+   *
+   * @param request The request object containing all of the parameters for the API call.
+   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+   */
+  public final WorkflowTemplate getWorkflowTemplate(GetWorkflowTemplateRequest request) {
+    return getWorkflowTemplateCallable().call(request);
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Retrieves the latest workflow template.
+   *
+   * <p>Can retrieve previously instantiated template by specifying optional version parameter.
+   *
+   * <p>Sample code:
+   */
+  public final UnaryCallable<GetWorkflowTemplateRequest, WorkflowTemplate>
+      getWorkflowTemplateCallable() {
+    return stub.getWorkflowTemplateCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -195,27 +366,21 @@ public final OperationsClient getOperationsClient() {
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name).get();
-   * }
-   * </code></pre>
-   *
    * @param name Required. The resource name of the workflow template, as described in
    *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.instantiate`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     </ul>
+   *
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplateAsync(
       WorkflowTemplateName name) {
     InstantiateWorkflowTemplateRequest request =
@@ -225,7 +390,7 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
     return instantiateWorkflowTemplateAsync(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -245,27 +410,21 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name.toString()).get();
-   * }
-   * </code></pre>
-   *
    * @param name Required. The resource name of the workflow template, as described in
    *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.instantiate`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     </ul>
+   *
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplateAsync(
       String name) {
     InstantiateWorkflowTemplateRequest request =
@@ -273,7 +432,7 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
     return instantiateWorkflowTemplateAsync(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -293,30 +452,23 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   Map<String, String> parameters = new HashMap<>();
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name, parameters).get();
-   * }
-   * </code></pre>
-   *
    * @param name Required. The resource name of the workflow template, as described in
    *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.instantiate`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     </ul>
+   *
    * @param parameters Optional. Map from parameter names to values that should be used for those
    *     parameters. Values may not exceed 100 characters.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplateAsync(
       WorkflowTemplateName name, Map<String, String> parameters) {
     InstantiateWorkflowTemplateRequest request =
@@ -327,7 +479,7 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
     return instantiateWorkflowTemplateAsync(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -347,30 +499,23 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   Map<String, String> parameters = new HashMap<>();
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name.toString(), parameters).get();
-   * }
-   * </code></pre>
-   *
    * @param name Required. The resource name of the workflow template, as described in
    *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.instantiate`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     </ul>
+   *
    * @param parameters Optional. Map from parameter names to values that should be used for those
    *     parameters. Values may not exceed 100 characters.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplateAsync(
       String name, Map<String, String> parameters) {
     InstantiateWorkflowTemplateRequest request =
@@ -381,7 +526,7 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
     return instantiateWorkflowTemplateAsync(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -401,29 +546,15 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(request).get();
-   * }
-   * </code></pre>
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplateAsync(
       InstantiateWorkflowTemplateRequest request) {
     return instantiateWorkflowTemplateOperationCallable().futureCall(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -444,26 +575,13 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
    * be [Empty][google.protobuf.Empty].
    *
    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   OperationFuture<Empty, WorkflowMetadata> future = workflowTemplateServiceClient.instantiateWorkflowTemplateOperationCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
    */
-  @BetaApi("The surface for use by generated code is not stable yet and may change in the future.")
   public final OperationCallable<InstantiateWorkflowTemplateRequest, Empty, WorkflowMetadata>
       instantiateWorkflowTemplateOperationCallable() {
     return stub.instantiateWorkflowTemplateOperationCallable();
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -484,25 +602,13 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
    * be [Empty][google.protobuf.Empty].
    *
    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<Operation> future = workflowTemplateServiceClient.instantiateWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
    */
   public final UnaryCallable<InstantiateWorkflowTemplateRequest, Operation>
       instantiateWorkflowTemplateCallable() {
     return stub.instantiateWorkflowTemplateCallable();
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -527,29 +633,22 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplat
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(parent, template).get();
-   * }
-   * </code></pre>
-   *
    * @param parent Required. The resource name of the region or location, as described in
    *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates,instantiateinline`, the resource name of
-   *     the region has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
-   *     the location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates,instantiateinline`, the resource name of the
+   *           region has the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
+   *           the location has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+   *
    * @param template Required. The workflow template to instantiate.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateAsync(
-      RegionName parent, WorkflowTemplate template) {
+      LocationName parent, WorkflowTemplate template) {
     InstantiateInlineWorkflowTemplateRequest request =
         InstantiateInlineWorkflowTemplateRequest.newBuilder()
             .setParent(parent == null ? null : parent.toString())
@@ -558,7 +657,7 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowT
     return instantiateInlineWorkflowTemplateAsync(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -583,29 +682,22 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowT
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(parent, template).get();
-   * }
-   * </code></pre>
-   *
    * @param parent Required. The resource name of the region or location, as described in
    *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates,instantiateinline`, the resource name of
-   *     the region has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
-   *     the location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates,instantiateinline`, the resource name of the
+   *           region has the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
+   *           the location has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+   *
    * @param template Required. The workflow template to instantiate.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateAsync(
-      LocationName parent, WorkflowTemplate template) {
+      RegionName parent, WorkflowTemplate template) {
     InstantiateInlineWorkflowTemplateRequest request =
         InstantiateInlineWorkflowTemplateRequest.newBuilder()
             .setParent(parent == null ? null : parent.toString())
@@ -614,7 +706,7 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowT
     return instantiateInlineWorkflowTemplateAsync(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -639,27 +731,20 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowT
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(parent.toString(), template).get();
-   * }
-   * </code></pre>
-   *
    * @param parent Required. The resource name of the region or location, as described in
    *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates,instantiateinline`, the resource name of
-   *     the region has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
-   *     the location has the following format: `projects/{project_id}/locations/{location}`
+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates,instantiateinline`, the resource name of the
+   *           region has the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
+   *           the location has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+   *
    * @param template Required. The workflow template to instantiate.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateAsync(
       String parent, WorkflowTemplate template) {
     InstantiateInlineWorkflowTemplateRequest request =
@@ -670,7 +755,7 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowT
     return instantiateInlineWorkflowTemplateAsync(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -695,31 +780,15 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowT
    *
    * <p>On successful completion, [Operation.response][google.longrunning.Operation.response] will
    * be [Empty][google.protobuf.Empty].
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(request).get();
-   * }
-   * </code></pre>
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
    */
-  @BetaApi(
-      "The surface for long-running operations is not stable yet and may change in the future.")
   public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateAsync(
       InstantiateInlineWorkflowTemplateRequest request) {
     return instantiateInlineWorkflowTemplateOperationCallable().futureCall(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -745,28 +814,13 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowT
    * be [Empty][google.protobuf.Empty].
    *
    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   OperationFuture<Empty, WorkflowMetadata> future = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateOperationCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
    */
-  @BetaApi("The surface for use by generated code is not stable yet and may change in the future.")
   public final OperationCallable<InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata>
       instantiateInlineWorkflowTemplateOperationCallable() {
     return stub.instantiateInlineWorkflowTemplateOperationCallable();
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Instantiates a template and begins execution.
    *
@@ -792,299 +846,17 @@ public final OperationFuture<Empty, WorkflowMetadata> instantiateInlineWorkflowT
    * be [Empty][google.protobuf.Empty].
    *
    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   ApiFuture<Operation> future = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
    */
   public final UnaryCallable<InstantiateInlineWorkflowTemplateRequest, Operation>
       instantiateInlineWorkflowTemplateCallable() {
     return stub.instantiateInlineWorkflowTemplateCallable();
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Creates new workflow template.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
-   * }
-   * </code></pre>
-   *
-   * @param parent Required. The resource name of the region or location, as described in
-   *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates,create`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.create`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
-   * @param template Required. The Dataproc workflow template to create.
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  public final WorkflowTemplate createWorkflowTemplate(
-      RegionName parent, WorkflowTemplate template) {
-    CreateWorkflowTemplateRequest request =
-        CreateWorkflowTemplateRequest.newBuilder()
-            .setParent(parent == null ? null : parent.toString())
-            .setTemplate(template)
-            .build();
-    return createWorkflowTemplate(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Creates new workflow template.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
-   * }
-   * </code></pre>
-   *
-   * @param parent Required. The resource name of the region or location, as described in
-   *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates,create`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.create`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
-   * @param template Required. The Dataproc workflow template to create.
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  public final WorkflowTemplate createWorkflowTemplate(
-      LocationName parent, WorkflowTemplate template) {
-    CreateWorkflowTemplateRequest request =
-        CreateWorkflowTemplateRequest.newBuilder()
-            .setParent(parent == null ? null : parent.toString())
-            .setTemplate(template)
-            .build();
-    return createWorkflowTemplate(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Creates new workflow template.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent.toString(), template);
-   * }
-   * </code></pre>
-   *
-   * @param parent Required. The resource name of the region or location, as described in
-   *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates,create`, the resource name of the region
-   *     has the following format: `projects/{project_id}/regions/{region}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.create`, the resource name of the
-   *     location has the following format: `projects/{project_id}/locations/{location}`
-   * @param template Required. The Dataproc workflow template to create.
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  public final WorkflowTemplate createWorkflowTemplate(String parent, WorkflowTemplate template) {
-    CreateWorkflowTemplateRequest request =
-        CreateWorkflowTemplateRequest.newBuilder().setParent(parent).setTemplate(template).build();
-    return createWorkflowTemplate(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Creates new workflow template.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(request);
-   * }
-   * </code></pre>
-   *
-   * @param request The request object containing all of the parameters for the API call.
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  public final WorkflowTemplate createWorkflowTemplate(CreateWorkflowTemplateRequest request) {
-    return createWorkflowTemplateCallable().call(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Creates new workflow template.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.createWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   WorkflowTemplate response = future.get();
-   * }
-   * </code></pre>
-   */
-  public final UnaryCallable<CreateWorkflowTemplateRequest, WorkflowTemplate>
-      createWorkflowTemplateCallable() {
-    return stub.createWorkflowTemplateCallable();
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Retrieves the latest workflow template.
-   *
-   * <p>Can retrieve previously instantiated template by specifying optional version parameter.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(name);
-   * }
-   * </code></pre>
-   *
-   * @param name Required. The resource name of the workflow template, as described in
-   *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates.get`, the resource name of the template
-   *     has the following format:
-   *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.get`, the resource name of the template
-   *     has the following format:
-   *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  public final WorkflowTemplate getWorkflowTemplate(WorkflowTemplateName name) {
-    GetWorkflowTemplateRequest request =
-        GetWorkflowTemplateRequest.newBuilder()
-            .setName(name == null ? null : name.toString())
-            .build();
-    return getWorkflowTemplate(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Retrieves the latest workflow template.
-   *
-   * <p>Can retrieve previously instantiated template by specifying optional version parameter.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(name.toString());
-   * }
-   * </code></pre>
-   *
-   * @param name Required. The resource name of the workflow template, as described in
-   *     https://cloud.google.com/apis/design/resource_names.
-   *     <p>&#42; For `projects.regions.workflowTemplates.get`, the resource name of the template
-   *     has the following format:
-   *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-   *     <p>&#42; For `projects.locations.workflowTemplates.get`, the resource name of the template
-   *     has the following format:
-   *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  public final WorkflowTemplate getWorkflowTemplate(String name) {
-    GetWorkflowTemplateRequest request =
-        GetWorkflowTemplateRequest.newBuilder().setName(name).build();
-    return getWorkflowTemplate(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Retrieves the latest workflow template.
-   *
-   * <p>Can retrieve previously instantiated template by specifying optional version parameter.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   GetWorkflowTemplateRequest request = GetWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(request);
-   * }
-   * </code></pre>
-   *
-   * @param request The request object containing all of the parameters for the API call.
-   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
-   */
-  public final WorkflowTemplate getWorkflowTemplate(GetWorkflowTemplateRequest request) {
-    return getWorkflowTemplateCallable().call(request);
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
-  /**
-   * Retrieves the latest workflow template.
-   *
-   * <p>Can retrieve previously instantiated template by specifying optional version parameter.
-   *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   GetWorkflowTemplateRequest request = GetWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.getWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   WorkflowTemplate response = future.get();
-   * }
-   * </code></pre>
-   */
-  public final UnaryCallable<GetWorkflowTemplateRequest, WorkflowTemplate>
-      getWorkflowTemplateCallable() {
-    return stub.getWorkflowTemplateCallable();
-  }
-
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Updates (replaces) workflow template. The updated template must contain version that matches
    * the current server version.
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.updateWorkflowTemplate(template);
-   * }
-   * </code></pre>
-   *
    * @param template Required. The updated workflow template.
    *     <p>The `template.version` field must match the current version.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
@@ -1095,23 +867,11 @@ public final WorkflowTemplate updateWorkflowTemplate(WorkflowTemplate template)
     return updateWorkflowTemplate(request);
   }
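A sketch of the read-modify-write cycle implied by the version check described above:

try (WorkflowTemplateServiceClient workflowTemplateServiceClient =
    WorkflowTemplateServiceClient.create()) {
  WorkflowTemplate current =
      workflowTemplateServiceClient.getWorkflowTemplate(
          WorkflowTemplateName.of("my-project", "us-central1", "my-template"));
  // The copy keeps current.getVersion(), so the server accepts the replacement.
  WorkflowTemplate updated =
      workflowTemplateServiceClient.updateWorkflowTemplate(
          current.toBuilder().putLabels("env", "prod").build());
}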
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Updates (replaces) workflow template. The updated template must contain version that matches
    * the current server version.
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   UpdateWorkflowTemplateRequest request = UpdateWorkflowTemplateRequest.newBuilder()
-   *     .setTemplate(template)
-   *     .build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.updateWorkflowTemplate(request);
-   * }
-   * </code></pre>
-   *
    * @param request The request object containing all of the parameters for the API call.
    * @throws com.google.api.gax.rpc.ApiException if the remote call fails
@@ -1119,54 +879,36 @@ public final WorkflowTemplate updateWorkflowTemplate(UpdateWorkflowTemplateReque
     return updateWorkflowTemplateCallable().call(request);
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Updates (replaces) workflow template. The updated template must contain version that matches
    * the current server version.
    *
    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   UpdateWorkflowTemplateRequest request = UpdateWorkflowTemplateRequest.newBuilder()
-   *     .setTemplate(template)
-   *     .build();
-   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.updateWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   WorkflowTemplate response = future.get();
-   * }
-   * </code></pre>
    */
   public final UnaryCallable<UpdateWorkflowTemplateRequest, WorkflowTemplate>
       updateWorkflowTemplateCallable() {
     return stub.updateWorkflowTemplateCallable();
   }
 
-  // AUTO-GENERATED DOCUMENTATION AND METHOD
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
   /**
    * Lists workflows that match the specified filter in the request.
    *
-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

-   *     <p>* For `projects.regions.workflowTemplates,list`, the resource name of the region has
-   *     the following format: `projects/{project_id}/regions/{region}`
-   *     <p>* For `projects.locations.workflowTemplates.list`, the resource name of the location
-   *     has the following format: `projects/{project_id}/locations/{location}`
+   *

+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.list`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.list`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(RegionName parent) { + public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(LocationName parent) { ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -1174,30 +916,24 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(RegionName return listWorkflowTemplates(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
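For the paged overloads the iteration pattern is unchanged; a sketch, assuming default credentials:

    try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      // iterateAll() lazily fetches further pages as the loop advances.
      for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent).iterateAll()) {
        // doThingsWith(element);
      }
    }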
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

-   *     <p>* For `projects.regions.workflowTemplates,list`, the resource name of the region has
-   *     the following format: `projects/{project_id}/regions/{region}`
-   *     <p>* For `projects.locations.workflowTemplates.list`, the resource name of the location
-   *     has the following format: `projects/{project_id}/locations/{location}`
+   *

+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.list`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.list`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(LocationName parent) { + public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(RegionName parent) { ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -1205,27 +941,21 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(LocationNa return listWorkflowTemplates(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent.toString()).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
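The String overload accepts either fully qualified parent form described below; the values here are illustrative placeholders:

    // Region-scoped parent:
    workflowTemplateServiceClient.listWorkflowTemplates("projects/[PROJECT]/regions/[REGION]");
    // Location-scoped parent:
    workflowTemplateServiceClient.listWorkflowTemplates("projects/[PROJECT]/locations/[LOCATION]");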
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

-   *     <p>* For `projects.regions.workflowTemplates,list`, the resource name of the region has
-   *     the following format: `projects/{project_id}/regions/{region}`
-   *     <p>* For `projects.locations.workflowTemplates.list`, the resource name of the location
-   *     has the following format: `projects/{project_id}/locations/{location}`
+   *

+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.list`, the resource name of the region has
+   *           the following format: `projects/{project_id}/regions/{region}`
+   *       <li>For `projects.locations.workflowTemplates.list`, the resource name of the location
+   *           has the following format: `projects/{project_id}/locations/{location}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(String parent) { @@ -1234,24 +964,10 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(String par return listWorkflowTemplates(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(request).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1260,84 +976,45 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates( return listWorkflowTemplatesPagedCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * *

    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   ApiFuture<ListWorkflowTemplatesPagedResponse> future = workflowTemplateServiceClient.listWorkflowTemplatesPagedCallable().futureCall(request);
-   *   // Do something
-   *   for (WorkflowTemplate element : future.get().iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
*/ public final UnaryCallable listWorkflowTemplatesPagedCallable() { return stub.listWorkflowTemplatesPagedCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * *

    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   while (true) {
-   *     ListWorkflowTemplatesResponse response = workflowTemplateServiceClient.listWorkflowTemplatesCallable().call(request);
-   *     for (WorkflowTemplate element : response.getTemplatesList()) {
-   *       // doThingsWith(element);
-   *     }
-   *     String nextPageToken = response.getNextPageToken();
-   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
-   *       request = request.toBuilder().setPageToken(nextPageToken).build();
-   *     } else {
-   *       break;
-   *     }
-   *   }
-   * }
-   * 
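The raw callable leaves page handling to the caller; the removed loop restated compactly, with the exit condition first (assumes `request` was built with a parent as in the samples above):

    while (true) {
      ListWorkflowTemplatesResponse response =
          workflowTemplateServiceClient.listWorkflowTemplatesCallable().call(request);
      for (WorkflowTemplate element : response.getTemplatesList()) {
        // doThingsWith(element);
      }
      String nextPageToken = response.getNextPageToken();
      if (Strings.isNullOrEmpty(nextPageToken)) {
        break;
      }
      request = request.toBuilder().setPageToken(nextPageToken).build();
    }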
*/ public final UnaryCallable listWorkflowTemplatesCallable() { return stub.listWorkflowTemplatesCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a workflow template. It does not cancel in-progress workflows. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.deleteWorkflowTemplate(name);
-   * }
-   * 
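A sketch of the surviving synchronous delete, assuming default credentials; per the description above, workflows already started from the template keep running:

    try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
      WorkflowTemplateName name =
          WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
      workflowTemplateServiceClient.deleteWorkflowTemplate(name);
    }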
- * * @param name Required. The resource name of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names. - *

-   *     <p>* For `projects.regions.workflowTemplates.delete`, the resource name of the template
-   *     has the following format:
-   *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-   *     <p>* For `projects.locations.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *

+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.delete`, the resource name of the template
+   *           has the following format:
+   *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   *       <li>For `projects.locations.workflowTemplates.delete`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteWorkflowTemplate(WorkflowTemplateName name) { @@ -1348,27 +1025,23 @@ public final void deleteWorkflowTemplate(WorkflowTemplateName name) { deleteWorkflowTemplate(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a workflow template. It does not cancel in-progress workflows. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.deleteWorkflowTemplate(name.toString());
-   * }
-   * 
- * * @param name Required. The resource name of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names. - *

-   *     <p>* For `projects.regions.workflowTemplates.delete`, the resource name of the template
-   *     has the following format:
-   *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
-   *     <p>* For `projects.locations.workflowTemplates.instantiate`, the resource name of the
-   *     template has the following format:
-   *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *

+   *     <ul>
+   *       <li>For `projects.regions.workflowTemplates.delete`, the resource name of the template
+   *           has the following format:
+   *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+   *       <li>For `projects.locations.workflowTemplates.delete`, the resource name of the
+   *           template has the following format:
+   *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+   *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteWorkflowTemplate(String name) { @@ -1377,22 +1050,10 @@ public final void deleteWorkflowTemplate(String name) { deleteWorkflowTemplate(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a workflow template. It does not cancel in-progress workflows. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   DeleteWorkflowTemplateRequest request = DeleteWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   workflowTemplateServiceClient.deleteWorkflowTemplate(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1400,23 +1061,11 @@ public final void deleteWorkflowTemplate(DeleteWorkflowTemplateRequest request) deleteWorkflowTemplateCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a workflow template. It does not cancel in-progress workflows. * *

    * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   DeleteWorkflowTemplateRequest request = DeleteWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<Void> future = workflowTemplateServiceClient.deleteWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
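The asynchronous variant via the callable; the response type is `Empty` (assumed from the callable's signature below), so only completion matters:

    WorkflowTemplateName name =
        WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
    DeleteWorkflowTemplateRequest request =
        DeleteWorkflowTemplateRequest.newBuilder().setName(name.toString()).build();
    ApiFuture<Empty> future =
        workflowTemplateServiceClient.deleteWorkflowTemplateCallable().futureCall(request);
    // Blocks until the RPC finishes; an exception here surfaces the API error.
    future.get();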
*/ public final UnaryCallable deleteWorkflowTemplateCallable() { diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceSettings.java index 5ddae67d..03d89c31 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1; import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; @@ -36,7 +37,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link WorkflowTemplateServiceClient}. * @@ -53,24 +54,39 @@ * *

  * <p>For example, to set the total timeout of createWorkflowTemplate to 30 seconds:
  *
- * <pre>
- * <code>
+ * <pre>{@code
  * WorkflowTemplateServiceSettings.Builder workflowTemplateServiceSettingsBuilder =
  *     WorkflowTemplateServiceSettings.newBuilder();
  * workflowTemplateServiceSettingsBuilder
  *     .createWorkflowTemplateSettings()
  *     .setRetrySettings(
- *         workflowTemplateServiceSettingsBuilder.createWorkflowTemplateSettings().getRetrySettings().toBuilder()
+ *         workflowTemplateServiceSettingsBuilder
+ *             .createWorkflowTemplateSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * WorkflowTemplateServiceSettings workflowTemplateServiceSettings = workflowTemplateServiceSettingsBuilder.build();
- * </code>
- * </pre>
+ * WorkflowTemplateServiceSettings workflowTemplateServiceSettings = + * workflowTemplateServiceSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class WorkflowTemplateServiceSettings extends ClientSettings { + + /** Returns the object with the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings + createWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .createWorkflowTemplateSettings(); + } + + /** Returns the object with the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings + getWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()).getWorkflowTemplateSettings(); + } + /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ public UnaryCallSettings instantiateWorkflowTemplateSettings() { @@ -79,8 +95,6 @@ public class WorkflowTemplateServiceSettings } /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings instantiateWorkflowTemplateOperationSettings() { return ((WorkflowTemplateServiceStubSettings) getStubSettings()) @@ -95,27 +109,12 @@ public class WorkflowTemplateServiceSettings } /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings instantiateInlineWorkflowTemplateOperationSettings() { return ((WorkflowTemplateServiceStubSettings) getStubSettings()) .instantiateInlineWorkflowTemplateOperationSettings(); } - /** Returns the object with the settings used for calls to createWorkflowTemplate. */ - public UnaryCallSettings - createWorkflowTemplateSettings() { - return ((WorkflowTemplateServiceStubSettings) getStubSettings()) - .createWorkflowTemplateSettings(); - } - - /** Returns the object with the settings used for calls to getWorkflowTemplate. */ - public UnaryCallSettings - getWorkflowTemplateSettings() { - return ((WorkflowTemplateServiceStubSettings) getStubSettings()).getWorkflowTemplateSettings(); - } - /** Returns the object with the settings used for calls to updateWorkflowTemplate. */ public UnaryCallSettings updateWorkflowTemplateSettings() { @@ -200,18 +199,15 @@ protected WorkflowTemplateServiceSettings(Builder settingsBuilder) throws IOExce /** Builder for WorkflowTemplateServiceSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(WorkflowTemplateServiceStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(WorkflowTemplateServiceStubSettings.newBuilder()); - } - protected Builder(WorkflowTemplateServiceSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -220,11 +216,15 @@ protected Builder(WorkflowTemplateServiceStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(WorkflowTemplateServiceStubSettings.newBuilder()); + } + public WorkflowTemplateServiceStubSettings.Builder getStubSettingsBuilder() { return ((WorkflowTemplateServiceStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. 
/** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -237,6 +237,18 @@ public Builder applyToAllUnaryMethods( return this; } + /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings.Builder + createWorkflowTemplateSettings() { + return getStubSettingsBuilder().createWorkflowTemplateSettings(); + } + + /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings.Builder + getWorkflowTemplateSettings() { + return getStubSettingsBuilder().getWorkflowTemplateSettings(); + } + /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ public UnaryCallSettings.Builder instantiateWorkflowTemplateSettings() { @@ -244,8 +256,6 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings.Builder< InstantiateWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateWorkflowTemplateOperationSettings() { @@ -259,26 +269,12 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings.Builder< InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateOperationSettings() { return getStubSettingsBuilder().instantiateInlineWorkflowTemplateOperationSettings(); } - /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ - public UnaryCallSettings.Builder - createWorkflowTemplateSettings() { - return getStubSettingsBuilder().createWorkflowTemplateSettings(); - } - - /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ - public UnaryCallSettings.Builder - getWorkflowTemplateSettings() { - return getStubSettingsBuilder().getWorkflowTemplateSettings(); - } - /** Returns the builder for the settings used for calls to updateWorkflowTemplate. */ public UnaryCallSettings.Builder updateWorkflowTemplateSettings() { diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/package-info.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/package-info.java index 4072e7af..4931bb50 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/package-info.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,78 +15,34 @@ */ /** - * A client to Cloud Dataproc API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

============================== AutoscalingPolicyServiceClient ============================== + *

======================= AutoscalingPolicyServiceClient ======================= * *

Service Description: The API interface for managing autoscaling policies in the Dataproc API. * *

Sample for AutoscalingPolicyServiceClient: * - *

- * 
- * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
- *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
- *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
- * }
- * 
- * 
- * - * ======================= ClusterControllerClient ======================= + *

======================= ClusterControllerClient ======================= * *

Service Description: The ClusterControllerService provides methods to manage clusters of * Compute Engine instances. * *

Sample for ClusterControllerClient: * - *

- * 
- * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
- *   String projectId = "";
- *   String region = "";
- *   String clusterName = "";
- *   Cluster response = clusterControllerClient.getCluster(projectId, region, clusterName);
- * }
- * 
- * 
- * - * =================== JobControllerClient =================== + *

======================= JobControllerClient ======================= * *

Service Description: The JobController provides methods to manage jobs. * *

Sample for JobControllerClient: * - *

- * 
- * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
- *   String projectId = "";
- *   String region = "";
- *   Job job = Job.newBuilder().build();
- *   Job response = jobControllerClient.submitJob(projectId, region, job);
- * }
- * 
- * 
- * - * ============================= WorkflowTemplateServiceClient ============================= + *

======================= WorkflowTemplateServiceClient ======================= * *

Service Description: The API interface for managing Workflow Templates in the Dataproc API. * *

Sample for WorkflowTemplateServiceClient: - * - *

- * 
- * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
- *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
- *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
- * }
- * 
- * 
*/ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.dataproc.v1; import javax.annotation.Generated; diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/AutoscalingPolicyServiceStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/AutoscalingPolicyServiceStub.java index 01685310..af1cf752 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/AutoscalingPolicyServiceStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/AutoscalingPolicyServiceStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dataproc.v1.AutoscalingPolicy; @@ -30,14 +30,13 @@ import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for Cloud Dataproc API. + * Base stub class for the AutoscalingPolicyService service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class AutoscalingPolicyServiceStub implements BackgroundResource { public UnaryCallable diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/AutoscalingPolicyServiceStubSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/AutoscalingPolicyServiceStubSettings.java index 07a89b98..58e91c6b 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/AutoscalingPolicyServiceStubSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/AutoscalingPolicyServiceStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; @@ -56,7 +57,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link AutoscalingPolicyServiceStub}. * @@ -73,22 +74,24 @@ * *

  * <p>For example, to set the total timeout of createAutoscalingPolicy to 30 seconds:
  *
- * <pre>
- * <code>
+ * <pre>{@code
  * AutoscalingPolicyServiceStubSettings.Builder autoscalingPolicyServiceSettingsBuilder =
  *     AutoscalingPolicyServiceStubSettings.newBuilder();
  * autoscalingPolicyServiceSettingsBuilder
  *     .createAutoscalingPolicySettings()
  *     .setRetrySettings(
- *         autoscalingPolicyServiceSettingsBuilder.createAutoscalingPolicySettings().getRetrySettings().toBuilder()
+ *         autoscalingPolicyServiceSettingsBuilder
+ *             .createAutoscalingPolicySettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * AutoscalingPolicyServiceStubSettings autoscalingPolicyServiceSettings = autoscalingPolicyServiceSettingsBuilder.build();
- * </code>
- * </pre>
+ * AutoscalingPolicyServiceStubSettings autoscalingPolicyServiceSettings = + * autoscalingPolicyServiceSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class AutoscalingPolicyServiceStubSettings extends StubSettings { /** The default scopes of the service. */ @@ -109,6 +112,78 @@ public class AutoscalingPolicyServiceStubSettings private final UnaryCallSettings deleteAutoscalingPolicySettings; + private static final PagedListDescriptor< + ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse, AutoscalingPolicy> + LIST_AUTOSCALING_POLICIES_PAGE_STR_DESC = + new PagedListDescriptor< + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + AutoscalingPolicy>() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListAutoscalingPoliciesRequest injectToken( + ListAutoscalingPoliciesRequest payload, String token) { + return ListAutoscalingPoliciesRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListAutoscalingPoliciesRequest injectPageSize( + ListAutoscalingPoliciesRequest payload, int pageSize) { + return ListAutoscalingPoliciesRequest.newBuilder(payload) + .setPageSize(pageSize) + .build(); + } + + @Override + public Integer extractPageSize(ListAutoscalingPoliciesRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListAutoscalingPoliciesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListAutoscalingPoliciesResponse payload) { + return payload.getPoliciesList() == null + ? ImmutableList.of() + : payload.getPoliciesList(); + } + }; + + private static final PagedListResponseFactory< + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + ListAutoscalingPoliciesPagedResponse> + LIST_AUTOSCALING_POLICIES_PAGE_STR_FACT = + new PagedListResponseFactory< + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + ListAutoscalingPoliciesPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable + callable, + ListAutoscalingPoliciesRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext< + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + AutoscalingPolicy> + pageContext = + PageContext.create( + callable, LIST_AUTOSCALING_POLICIES_PAGE_STR_DESC, request, context); + return ListAutoscalingPoliciesPagedResponse.createAsync(pageContext, futureResponse); + } + }; + /** Returns the object with the settings used for calls to createAutoscalingPolicy. */ public UnaryCallSettings createAutoscalingPolicySettings() { @@ -148,10 +223,10 @@ public AutoscalingPolicyServiceStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcAutoscalingPolicyServiceStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. 
*/ @@ -218,83 +293,10 @@ protected AutoscalingPolicyServiceStubSettings(Builder settingsBuilder) throws I deleteAutoscalingPolicySettings = settingsBuilder.deleteAutoscalingPolicySettings().build(); } - private static final PagedListDescriptor< - ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse, AutoscalingPolicy> - LIST_AUTOSCALING_POLICIES_PAGE_STR_DESC = - new PagedListDescriptor< - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - AutoscalingPolicy>() { - @Override - public String emptyToken() { - return ""; - } - - @Override - public ListAutoscalingPoliciesRequest injectToken( - ListAutoscalingPoliciesRequest payload, String token) { - return ListAutoscalingPoliciesRequest.newBuilder(payload).setPageToken(token).build(); - } - - @Override - public ListAutoscalingPoliciesRequest injectPageSize( - ListAutoscalingPoliciesRequest payload, int pageSize) { - return ListAutoscalingPoliciesRequest.newBuilder(payload) - .setPageSize(pageSize) - .build(); - } - - @Override - public Integer extractPageSize(ListAutoscalingPoliciesRequest payload) { - return payload.getPageSize(); - } - - @Override - public String extractNextToken(ListAutoscalingPoliciesResponse payload) { - return payload.getNextPageToken(); - } - - @Override - public Iterable extractResources( - ListAutoscalingPoliciesResponse payload) { - return payload.getPoliciesList() != null - ? payload.getPoliciesList() - : ImmutableList.of(); - } - }; - - private static final PagedListResponseFactory< - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - ListAutoscalingPoliciesPagedResponse> - LIST_AUTOSCALING_POLICIES_PAGE_STR_FACT = - new PagedListResponseFactory< - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - ListAutoscalingPoliciesPagedResponse>() { - @Override - public ApiFuture getFuturePagedResponse( - UnaryCallable - callable, - ListAutoscalingPoliciesRequest request, - ApiCallContext context, - ApiFuture futureResponse) { - PageContext< - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - AutoscalingPolicy> - pageContext = - PageContext.create( - callable, LIST_AUTOSCALING_POLICIES_PAGE_STR_DESC, request, context); - return ListAutoscalingPoliciesPagedResponse.createAsync(pageContext, futureResponse); - } - }; - /** Builder for AutoscalingPolicyServiceStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createAutoscalingPolicySettings; private final UnaryCallSettings.Builder @@ -308,7 +310,6 @@ public static class Builder listAutoscalingPoliciesSettings; private final UnaryCallSettings.Builder deleteAutoscalingPolicySettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -316,43 +317,12 @@ public static class Builder ImmutableMap.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_4_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.INTERNAL, - StatusCode.Code.UNAVAILABLE))); + "no_retry_0_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( "retry_policy_1_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_6_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.INTERNAL, - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_3_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.INTERNAL, - StatusCode.Code.UNAVAILABLE))); - definitions.put( - "no_retry_1_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_5_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_7_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -363,15 +333,12 @@ public static class Builder RetrySettings settings = null; settings = RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) .setInitialRpcTimeout(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeout(Duration.ofMillis(600000L)) .setTotalTimeout(Duration.ofMillis(600000L)) .build(); - definitions.put("retry_policy_1_params", settings); + definitions.put("no_retry_0_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.ofMillis(100L)) @@ -382,91 +349,22 @@ public static class Builder .setMaxRpcTimeout(Duration.ofMillis(600000L)) .setTotalTimeout(Duration.ofMillis(600000L)) .build(); - definitions.put("retry_policy_3_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(900000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(900000L)) - .setTotalTimeout(Duration.ofMillis(900000L)) - .build(); - definitions.put("retry_policy_2_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(300000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(300000L)) - .setTotalTimeout(Duration.ofMillis(300000L)) - .build(); - 
definitions.put("retry_policy_6_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(900000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(900000L)) - .setTotalTimeout(Duration.ofMillis(900000L)) - .build(); - definitions.put("retry_policy_7_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(300000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(300000L)) - .setTotalTimeout(Duration.ofMillis(300000L)) - .build(); - definitions.put("retry_policy_5_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_4_params", settings); - settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build(); - definitions.put("no_retry_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("no_retry_1_params", settings); + definitions.put("retry_policy_1_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); createAutoscalingPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - updateAutoscalingPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - getAutoscalingPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - listAutoscalingPoliciesSettings = PagedCallSettings.newBuilder(LIST_AUTOSCALING_POLICIES_PAGE_STR_FACT); - deleteAutoscalingPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = @@ -476,25 +374,43 @@ protected Builder(ClientContext clientContext) { getAutoscalingPolicySettings, listAutoscalingPoliciesSettings, deleteAutoscalingPolicySettings); - initDefaults(this); } + protected Builder(AutoscalingPolicyServiceStubSettings settings) { + super(settings); + + createAutoscalingPolicySettings = settings.createAutoscalingPolicySettings.toBuilder(); + updateAutoscalingPolicySettings = settings.updateAutoscalingPolicySettings.toBuilder(); + getAutoscalingPolicySettings = settings.getAutoscalingPolicySettings.toBuilder(); + listAutoscalingPoliciesSettings = settings.listAutoscalingPoliciesSettings.toBuilder(); + deleteAutoscalingPolicySettings = settings.deleteAutoscalingPolicySettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createAutoscalingPolicySettings, + updateAutoscalingPolicySettings, + getAutoscalingPolicySettings, + listAutoscalingPoliciesSettings, + deleteAutoscalingPolicySettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + 
builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createAutoscalingPolicySettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params")); builder .updateAutoscalingPolicySettings() @@ -513,31 +429,13 @@ private static Builder initDefaults(Builder builder) { builder .deleteAutoscalingPolicySettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params")); return builder; } - protected Builder(AutoscalingPolicyServiceStubSettings settings) { - super(settings); - - createAutoscalingPolicySettings = settings.createAutoscalingPolicySettings.toBuilder(); - updateAutoscalingPolicySettings = settings.updateAutoscalingPolicySettings.toBuilder(); - getAutoscalingPolicySettings = settings.getAutoscalingPolicySettings.toBuilder(); - listAutoscalingPoliciesSettings = settings.listAutoscalingPoliciesSettings.toBuilder(); - deleteAutoscalingPolicySettings = settings.deleteAutoscalingPolicySettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createAutoscalingPolicySettings, - updateAutoscalingPolicySettings, - getAutoscalingPolicySettings, - listAutoscalingPoliciesSettings, - deleteAutoscalingPolicySettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStub.java index e1fbf8c3..a746102a 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.ClusterControllerClient.ListClustersPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -36,22 +36,19 @@ import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for Cloud Dataproc API. + * Base stub class for the ClusterController service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class ClusterControllerStub implements BackgroundResource { - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationsStub getOperationsStub() { throw new UnsupportedOperationException("Not implemented: getOperationsStub()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable createClusterOperationCallable() { throw new UnsupportedOperationException("Not implemented: createClusterOperationCallable()"); @@ -61,7 +58,6 @@ public UnaryCallable createClusterCallable() { throw new UnsupportedOperationException("Not implemented: createClusterCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable updateClusterOperationCallable() { throw new UnsupportedOperationException("Not implemented: updateClusterOperationCallable()"); @@ -71,7 +67,6 @@ public UnaryCallable updateClusterCallable() { throw new UnsupportedOperationException("Not implemented: updateClusterCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable deleteClusterOperationCallable() { throw new UnsupportedOperationException("Not implemented: deleteClusterOperationCallable()"); @@ -81,16 +76,6 @@ public UnaryCallable deleteClusterCallable() { throw new UnsupportedOperationException("Not implemented: deleteClusterCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallable - diagnoseClusterOperationCallable() { - throw new UnsupportedOperationException("Not implemented: diagnoseClusterOperationCallable()"); - } - - public UnaryCallable diagnoseClusterCallable() { - throw new UnsupportedOperationException("Not implemented: diagnoseClusterCallable()"); - } - public UnaryCallable getClusterCallable() { throw new UnsupportedOperationException("Not implemented: getClusterCallable()"); } @@ -103,6 +88,15 @@ public UnaryCallable listClustersCall throw new UnsupportedOperationException("Not implemented: listClustersCallable()"); } + public OperationCallable + diagnoseClusterOperationCallable() { + throw new UnsupportedOperationException("Not implemented: diagnoseClusterOperationCallable()"); + } + + public UnaryCallable diagnoseClusterCallable() { + throw new UnsupportedOperationException("Not implemented: diagnoseClusterCallable()"); + } + @Override public abstract void close(); } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStubSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStubSettings.java index 1d232648..3fc25f55 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStubSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/ClusterControllerStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.ClusterControllerClient.ListClustersPagedResponse; @@ -64,7 +65,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link ClusterControllerStub}. * @@ -81,22 +82,24 @@ * *

  * <p>For example, to set the total timeout of getCluster to 30 seconds:
  *
- * <pre>
- * <code>
+ * <pre>{@code
  * ClusterControllerStubSettings.Builder clusterControllerSettingsBuilder =
  *     ClusterControllerStubSettings.newBuilder();
  * clusterControllerSettingsBuilder
  *     .getClusterSettings()
  *     .setRetrySettings(
- *         clusterControllerSettingsBuilder.getClusterSettings().getRetrySettings().toBuilder()
+ *         clusterControllerSettingsBuilder
+ *             .getClusterSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * ClusterControllerStubSettings clusterControllerSettings = clusterControllerSettingsBuilder.build();
- * </code>
- * </pre>
+ * ClusterControllerStubSettings clusterControllerSettings = + * clusterControllerSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class ClusterControllerStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -111,14 +114,67 @@ public class ClusterControllerStubSettings extends StubSettings deleteClusterSettings; private final OperationCallSettings deleteClusterOperationSettings; + private final UnaryCallSettings getClusterSettings; + private final PagedCallSettings< + ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> + listClustersSettings; private final UnaryCallSettings diagnoseClusterSettings; private final OperationCallSettings< DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata> diagnoseClusterOperationSettings; - private final UnaryCallSettings getClusterSettings; - private final PagedCallSettings< + + private static final PagedListDescriptor + LIST_CLUSTERS_PAGE_STR_DESC = + new PagedListDescriptor() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListClustersRequest injectToken(ListClustersRequest payload, String token) { + return ListClustersRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListClustersRequest injectPageSize(ListClustersRequest payload, int pageSize) { + return ListClustersRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListClustersRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListClustersResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListClustersResponse payload) { + return payload.getClustersList() == null + ? ImmutableList.of() + : payload.getClustersList(); + } + }; + + private static final PagedListResponseFactory< ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> - listClustersSettings; + LIST_CLUSTERS_PAGE_STR_FACT = + new PagedListResponseFactory< + ListClustersRequest, ListClustersResponse, ListClustersPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListClustersRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext pageContext = + PageContext.create(callable, LIST_CLUSTERS_PAGE_STR_DESC, request, context); + return ListClustersPagedResponse.createAsync(pageContext, futureResponse); + } + }; /** Returns the object with the settings used for calls to createCluster. */ public UnaryCallSettings createClusterSettings() { @@ -126,7 +182,6 @@ public UnaryCallSettings createClusterSettings( } /** Returns the object with the settings used for calls to createCluster. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings createClusterOperationSettings() { return createClusterOperationSettings; @@ -138,7 +193,6 @@ public UnaryCallSettings updateClusterSettings( } /** Returns the object with the settings used for calls to updateCluster. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings updateClusterOperationSettings() { return updateClusterOperationSettings; @@ -150,46 +204,44 @@ public UnaryCallSettings deleteClusterSettings( } /** Returns the object with the settings used for calls to deleteCluster. 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings deleteClusterOperationSettings() { return deleteClusterOperationSettings; } + /** Returns the object with the settings used for calls to getCluster. */ + public UnaryCallSettings getClusterSettings() { + return getClusterSettings; + } + + /** Returns the object with the settings used for calls to listClusters. */ + public PagedCallSettings + listClustersSettings() { + return listClustersSettings; + } + /** Returns the object with the settings used for calls to diagnoseCluster. */ public UnaryCallSettings diagnoseClusterSettings() { return diagnoseClusterSettings; } /** Returns the object with the settings used for calls to diagnoseCluster. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings< DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata> diagnoseClusterOperationSettings() { return diagnoseClusterOperationSettings; } - /** Returns the object with the settings used for calls to getCluster. */ - public UnaryCallSettings getClusterSettings() { - return getClusterSettings; - } - - /** Returns the object with the settings used for calls to listClusters. */ - public PagedCallSettings - listClustersSettings() { - return listClustersSettings; - } - @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public ClusterControllerStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcClusterControllerStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. 
*/ @@ -255,69 +307,15 @@ protected ClusterControllerStubSettings(Builder settingsBuilder) throws IOExcept updateClusterOperationSettings = settingsBuilder.updateClusterOperationSettings().build(); deleteClusterSettings = settingsBuilder.deleteClusterSettings().build(); deleteClusterOperationSettings = settingsBuilder.deleteClusterOperationSettings().build(); - diagnoseClusterSettings = settingsBuilder.diagnoseClusterSettings().build(); - diagnoseClusterOperationSettings = settingsBuilder.diagnoseClusterOperationSettings().build(); getClusterSettings = settingsBuilder.getClusterSettings().build(); listClustersSettings = settingsBuilder.listClustersSettings().build(); + diagnoseClusterSettings = settingsBuilder.diagnoseClusterSettings().build(); + diagnoseClusterOperationSettings = settingsBuilder.diagnoseClusterOperationSettings().build(); } - private static final PagedListDescriptor - LIST_CLUSTERS_PAGE_STR_DESC = - new PagedListDescriptor() { - @Override - public String emptyToken() { - return ""; - } - - @Override - public ListClustersRequest injectToken(ListClustersRequest payload, String token) { - return ListClustersRequest.newBuilder(payload).setPageToken(token).build(); - } - - @Override - public ListClustersRequest injectPageSize(ListClustersRequest payload, int pageSize) { - return ListClustersRequest.newBuilder(payload).setPageSize(pageSize).build(); - } - - @Override - public Integer extractPageSize(ListClustersRequest payload) { - return payload.getPageSize(); - } - - @Override - public String extractNextToken(ListClustersResponse payload) { - return payload.getNextPageToken(); - } - - @Override - public Iterable extractResources(ListClustersResponse payload) { - return payload.getClustersList() != null - ? payload.getClustersList() - : ImmutableList.of(); - } - }; - - private static final PagedListResponseFactory< - ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> - LIST_CLUSTERS_PAGE_STR_FACT = - new PagedListResponseFactory< - ListClustersRequest, ListClustersResponse, ListClustersPagedResponse>() { - @Override - public ApiFuture getFuturePagedResponse( - UnaryCallable callable, - ListClustersRequest request, - ApiCallContext context, - ApiFuture futureResponse) { - PageContext pageContext = - PageContext.create(callable, LIST_CLUSTERS_PAGE_STR_DESC, request, context); - return ListClustersPagedResponse.createAsync(pageContext, futureResponse); - } - }; - /** Builder for ClusterControllerStubSettings. 
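The reordered static block maps policy names to retryable status codes: "retry_policy_5_codes" retries only UNAVAILABLE (used by the mutating cluster RPCs), while the membership of "retry_policy_6_codes" is elided in this hunk. An equivalent hand-rolled override, assuming a Builder instance named builder is in scope:

    import com.google.api.gax.rpc.StatusCode;

    // Mirrors "retry_policy_5_codes": retry deleteCluster only on UNAVAILABLE.
    builder.deleteClusterSettings().setRetryableCodes(StatusCode.Code.UNAVAILABLE);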
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createClusterSettings; private final OperationCallSettings.Builder< CreateClusterRequest, Cluster, ClusterOperationMetadata> @@ -330,22 +328,24 @@ public static class Builder extends StubSettings.Builder deleteClusterOperationSettings; + private final UnaryCallSettings.Builder getClusterSettings; + private final PagedCallSettings.Builder< + ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> + listClustersSettings; private final UnaryCallSettings.Builder diagnoseClusterSettings; private final OperationCallSettings.Builder< DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata> diagnoseClusterOperationSettings; - private final UnaryCallSettings.Builder getClusterSettings; - private final PagedCallSettings.Builder< - ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> - listClustersSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "retry_policy_5_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( "retry_policy_6_codes", ImmutableSet.copyOf( @@ -353,10 +353,6 @@ public static class Builder extends StubSettings.BuildernewArrayList())); - definitions.put( - "retry_policy_5_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -375,7 +371,7 @@ public static class Builder extends StubSettings.Builder>of( + createClusterSettings, + updateClusterSettings, + deleteClusterSettings, + getClusterSettings, + listClustersSettings, + diagnoseClusterSettings); + initDefaults(this); + } - listClustersSettings = PagedCallSettings.newBuilder(LIST_CLUSTERS_PAGE_STR_FACT); + protected Builder(ClusterControllerStubSettings settings) { + super(settings); + + createClusterSettings = settings.createClusterSettings.toBuilder(); + createClusterOperationSettings = settings.createClusterOperationSettings.toBuilder(); + updateClusterSettings = settings.updateClusterSettings.toBuilder(); + updateClusterOperationSettings = settings.updateClusterOperationSettings.toBuilder(); + deleteClusterSettings = settings.deleteClusterSettings.toBuilder(); + deleteClusterOperationSettings = settings.deleteClusterOperationSettings.toBuilder(); + getClusterSettings = settings.getClusterSettings.toBuilder(); + listClustersSettings = settings.listClustersSettings.toBuilder(); + diagnoseClusterSettings = settings.diagnoseClusterSettings.toBuilder(); + diagnoseClusterOperationSettings = settings.diagnoseClusterOperationSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.>of( createClusterSettings, updateClusterSettings, deleteClusterSettings, - diagnoseClusterSettings, getClusterSettings, - listClustersSettings); - - initDefaults(this); + listClustersSettings, + diagnoseClusterSettings); } private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder 
initDefaults(Builder builder) { - builder .createClusterSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) @@ -457,11 +466,6 @@ private static Builder initDefaults(Builder builder) { .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); - builder - .diagnoseClusterSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); - builder .getClusterSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) @@ -471,6 +475,12 @@ private static Builder initDefaults(Builder builder) { .listClustersSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + + builder + .diagnoseClusterSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); + builder .createClusterOperationSettings() .setInitialCallSettings( @@ -489,11 +499,12 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(900000L)) .build())); + builder .updateClusterOperationSettings() .setInitialCallSettings( @@ -512,11 +523,12 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(900000L)) .build())); + builder .deleteClusterOperationSettings() .setInitialCallSettings( @@ -535,11 +547,12 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(900000L)) .build())); + builder .diagnoseClusterOperationSettings() .setInitialCallSettings( @@ -558,40 +571,16 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(30000L)) .build())); return builder; } - protected Builder(ClusterControllerStubSettings settings) { - super(settings); - - createClusterSettings = 
settings.createClusterSettings.toBuilder(); - createClusterOperationSettings = settings.createClusterOperationSettings.toBuilder(); - updateClusterSettings = settings.updateClusterSettings.toBuilder(); - updateClusterOperationSettings = settings.updateClusterOperationSettings.toBuilder(); - deleteClusterSettings = settings.deleteClusterSettings.toBuilder(); - deleteClusterOperationSettings = settings.deleteClusterOperationSettings.toBuilder(); - diagnoseClusterSettings = settings.diagnoseClusterSettings.toBuilder(); - diagnoseClusterOperationSettings = settings.diagnoseClusterOperationSettings.toBuilder(); - getClusterSettings = settings.getClusterSettings.toBuilder(); - listClustersSettings = settings.listClustersSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createClusterSettings, - updateClusterSettings, - deleteClusterSettings, - diagnoseClusterSettings, - getClusterSettings, - listClustersSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -646,6 +635,18 @@ public UnaryCallSettings.Builder deleteClusterS return deleteClusterOperationSettings; } + /** Returns the builder for the settings used for calls to getCluster. */ + public UnaryCallSettings.Builder getClusterSettings() { + return getClusterSettings; + } + + /** Returns the builder for the settings used for calls to listClusters. */ + public PagedCallSettings.Builder< + ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> + listClustersSettings() { + return listClustersSettings; + } + /** Returns the builder for the settings used for calls to diagnoseCluster. */ public UnaryCallSettings.Builder diagnoseClusterSettings() { return diagnoseClusterSettings; @@ -660,18 +661,6 @@ public UnaryCallSettings.Builder diagnoseClus return diagnoseClusterOperationSettings; } - /** Returns the builder for the settings used for calls to getCluster. */ - public UnaryCallSettings.Builder getClusterSettings() { - return getClusterSettings; - } - - /** Returns the builder for the settings used for calls to listClusters. */ - public PagedCallSettings.Builder< - ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> - listClustersSettings() { - return listClustersSettings; - } - @Override public ClusterControllerStubSettings build() throws IOException { return new ClusterControllerStubSettings(this); diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcAutoscalingPolicyServiceCallableFactory.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcAutoscalingPolicyServiceCallableFactory.java index 54da4666..57de9885 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcAutoscalingPolicyServiceCallableFactory.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcAutoscalingPolicyServiceCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
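initDefaults() above wires each long-running operation to a poll loop that starts after 1000 ms, doubles the delay up to a 10000 ms cap, and gives up after 900000 ms total (30000 ms for diagnoseCluster); the RPC-timeout fields are set to Duration.ZERO because the poller ignores them. A sketch of overriding that cadence on a builder, with illustrative values only:

    import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
    import com.google.api.gax.retrying.RetrySettings;
    import org.threeten.bp.Duration;

    builder
        .createClusterOperationSettings()
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelay(Duration.ofSeconds(5)) // first poll after 5 s
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelay(Duration.ofSeconds(45))
                    .setTotalTimeout(Duration.ofMinutes(20)) // overall LRO budget
                    .build()));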
*/ + package com.google.cloud.dataproc.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for Cloud Dataproc API. + * gRPC callable factory implementation for the AutoscalingPolicyService service API. * *
<p>
This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcAutoscalingPolicyServiceCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcAutoscalingPolicyServiceStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcAutoscalingPolicyServiceStub.java index c8f1fde2..286c9f4a 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcAutoscalingPolicyServiceStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcAutoscalingPolicyServiceStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -33,6 +33,7 @@ import com.google.cloud.dataproc.v1.ListAutoscalingPoliciesResponse; import com.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; @@ -41,16 +42,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for Cloud Dataproc API. + * gRPC stub implementation for the AutoscalingPolicyService service API. * *
<p>
This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceStub { - private static final MethodDescriptor createAutoscalingPolicyMethodDescriptor = MethodDescriptor.newBuilder() @@ -61,6 +60,7 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt ProtoUtils.marshaller(CreateAutoscalingPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AutoscalingPolicy.getDefaultInstance())) .build(); + private static final MethodDescriptor updateAutoscalingPolicyMethodDescriptor = MethodDescriptor.newBuilder() @@ -71,6 +71,7 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt ProtoUtils.marshaller(UpdateAutoscalingPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AutoscalingPolicy.getDefaultInstance())) .build(); + private static final MethodDescriptor getAutoscalingPolicyMethodDescriptor = MethodDescriptor.newBuilder() @@ -81,6 +82,7 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt ProtoUtils.marshaller(GetAutoscalingPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AutoscalingPolicy.getDefaultInstance())) .build(); + private static final MethodDescriptor< ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse> listAutoscalingPoliciesMethodDescriptor = @@ -94,6 +96,7 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt .setResponseMarshaller( ProtoUtils.marshaller(ListAutoscalingPoliciesResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor deleteAutoscalingPolicyMethodDescriptor = MethodDescriptor.newBuilder() @@ -105,8 +108,6 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final UnaryCallable createAutoscalingPolicyCallable; private final UnaryCallable @@ -120,6 +121,8 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt private final UnaryCallable deleteAutoscalingPolicyCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcAutoscalingPolicyServiceStub create( @@ -161,6 +164,7 @@ protected GrpcAutoscalingPolicyServiceStub( GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createAutoscalingPolicyTransportSettings = @@ -265,7 +269,12 @@ public Map extract(DeleteAutoscalingPolicyRequest request) { settings.deleteAutoscalingPolicySettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } public UnaryCallable @@ -283,16 +292,16 @@ public Map extract(DeleteAutoscalingPolicyRequest request) { return getAutoscalingPolicyCallable; } - public 
UnaryCallable - listAutoscalingPoliciesPagedCallable() { - return listAutoscalingPoliciesPagedCallable; - } - public UnaryCallable listAutoscalingPoliciesCallable() { return listAutoscalingPoliciesCallable; } + public UnaryCallable + listAutoscalingPoliciesPagedCallable() { + return listAutoscalingPoliciesPagedCallable; + } + public UnaryCallable deleteAutoscalingPolicyCallable() { return deleteAutoscalingPolicyCallable; } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcClusterControllerCallableFactory.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcClusterControllerCallableFactory.java index 153e6aa2..030721a0 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcClusterControllerCallableFactory.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcClusterControllerCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for Cloud Dataproc API. + * gRPC callable factory implementation for the ClusterController service API. * *
<p>
This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcClusterControllerCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcClusterControllerStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcClusterControllerStub.java index b1c845f0..4aafc4b4 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcClusterControllerStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcClusterControllerStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
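Every RPC in a gRPC stub is declared once as a static MethodDescriptor binding the proto-qualified method name to protobuf marshallers; the regenerated files merely separate the declarations with blank lines. The pattern, reconstructed as a standalone sketch for the GetAutoscalingPolicy RPC (the full method name follows the usual <package>.<Service>/<Method> convention, and the constant name here is illustrative):

    import com.google.cloud.dataproc.v1.AutoscalingPolicy;
    import com.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest;
    import io.grpc.MethodDescriptor;
    import io.grpc.protobuf.ProtoUtils;

    static final MethodDescriptor<GetAutoscalingPolicyRequest, AutoscalingPolicy>
        GET_AUTOSCALING_POLICY =
            MethodDescriptor.<GetAutoscalingPolicyRequest, AutoscalingPolicy>newBuilder()
                .setType(MethodDescriptor.MethodType.UNARY)
                .setFullMethodName(
                    "google.cloud.dataproc.v1.AutoscalingPolicyService/GetAutoscalingPolicy")
                .setRequestMarshaller(
                    ProtoUtils.marshaller(GetAutoscalingPolicyRequest.getDefaultInstance()))
                .setResponseMarshaller(
                    ProtoUtils.marshaller(AutoscalingPolicy.getDefaultInstance()))
                .build();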
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,17 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.ClusterControllerClient.ListClustersPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dataproc.v1.Cluster; import com.google.cloud.dataproc.v1.ClusterOperationMetadata; @@ -35,25 +36,25 @@ import com.google.cloud.dataproc.v1.ListClustersRequest; import com.google.cloud.dataproc.v1.ListClustersResponse; import com.google.cloud.dataproc.v1.UpdateClusterRequest; +import com.google.common.collect.ImmutableMap; import com.google.longrunning.Operation; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; +import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for Cloud Dataproc API. + * gRPC stub implementation for the ClusterController service API. * *
<p>
This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcClusterControllerStub extends ClusterControllerStub { - private static final MethodDescriptor createClusterMethodDescriptor = MethodDescriptor.newBuilder() @@ -63,6 +64,7 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { ProtoUtils.marshaller(CreateClusterRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor updateClusterMethodDescriptor = MethodDescriptor.newBuilder() @@ -72,6 +74,7 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { ProtoUtils.marshaller(UpdateClusterRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor deleteClusterMethodDescriptor = MethodDescriptor.newBuilder() @@ -81,15 +84,7 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { ProtoUtils.marshaller(DeleteClusterRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); - private static final MethodDescriptor - diagnoseClusterMethodDescriptor = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName("google.cloud.dataproc.v1.ClusterController/DiagnoseCluster") - .setRequestMarshaller( - ProtoUtils.marshaller(DiagnoseClusterRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) - .build(); + private static final MethodDescriptor getClusterMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -97,6 +92,7 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(GetClusterRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Cluster.getDefaultInstance())) .build(); + private static final MethodDescriptor listClustersMethodDescriptor = MethodDescriptor.newBuilder() @@ -107,8 +103,15 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { ProtoUtils.marshaller(ListClustersResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; + private static final MethodDescriptor + diagnoseClusterMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.dataproc.v1.ClusterController/DiagnoseCluster") + .setRequestMarshaller( + ProtoUtils.marshaller(DiagnoseClusterRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) + .build(); private final UnaryCallable createClusterCallable; private final OperationCallable @@ -119,15 +122,17 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { private final UnaryCallable deleteClusterCallable; private final OperationCallable deleteClusterOperationCallable; - private final UnaryCallable diagnoseClusterCallable; - private final OperationCallable< - DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata> - diagnoseClusterOperationCallable; private final UnaryCallable getClusterCallable; private final UnaryCallable 
listClustersCallable; private final UnaryCallable listClustersPagedCallable; + private final UnaryCallable diagnoseClusterCallable; + private final OperationCallable< + DiagnoseClusterRequest, DiagnoseClusterResults, ClusterOperationMetadata> + diagnoseClusterOperationCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcClusterControllerStub create(ClusterControllerStubSettings settings) @@ -173,26 +178,90 @@ protected GrpcClusterControllerStub( GrpcCallSettings createClusterTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(createClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(CreateClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings updateClusterTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(updateClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(UpdateClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("cluster_name", String.valueOf(request.getClusterName())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings deleteClusterTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(deleteClusterMethodDescriptor) - .build(); - GrpcCallSettings diagnoseClusterTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(diagnoseClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(DeleteClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("cluster_name", String.valueOf(request.getClusterName())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings getClusterTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(getClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(GetClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("cluster_name", String.valueOf(request.getClusterName())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings listClustersTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(listClustersMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(ListClustersRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) + .build(); + GrpcCallSettings diagnoseClusterTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(diagnoseClusterMethodDescriptor) + .setParamsExtractor( + new 
RequestParamsExtractor() { + @Override + public Map extract(DiagnoseClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("cluster_name", String.valueOf(request.getClusterName())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); this.createClusterCallable = @@ -203,7 +272,7 @@ protected GrpcClusterControllerStub( createClusterTransportSettings, settings.createClusterOperationSettings(), clientContext, - this.operationsStub); + operationsStub); this.updateClusterCallable = callableFactory.createUnaryCallable( updateClusterTransportSettings, settings.updateClusterSettings(), clientContext); @@ -212,7 +281,7 @@ protected GrpcClusterControllerStub( updateClusterTransportSettings, settings.updateClusterOperationSettings(), clientContext, - this.operationsStub); + operationsStub); this.deleteClusterCallable = callableFactory.createUnaryCallable( deleteClusterTransportSettings, settings.deleteClusterSettings(), clientContext); @@ -221,16 +290,7 @@ protected GrpcClusterControllerStub( deleteClusterTransportSettings, settings.deleteClusterOperationSettings(), clientContext, - this.operationsStub); - this.diagnoseClusterCallable = - callableFactory.createUnaryCallable( - diagnoseClusterTransportSettings, settings.diagnoseClusterSettings(), clientContext); - this.diagnoseClusterOperationCallable = - callableFactory.createOperationCallable( - diagnoseClusterTransportSettings, - settings.diagnoseClusterOperationSettings(), - clientContext, - this.operationsStub); + operationsStub); this.getClusterCallable = callableFactory.createUnaryCallable( getClusterTransportSettings, settings.getClusterSettings(), clientContext); @@ -240,65 +300,70 @@ protected GrpcClusterControllerStub( this.listClustersPagedCallable = callableFactory.createPagedCallable( listClustersTransportSettings, settings.listClustersSettings(), clientContext); + this.diagnoseClusterCallable = + callableFactory.createUnaryCallable( + diagnoseClusterTransportSettings, settings.diagnoseClusterSettings(), clientContext); + this.diagnoseClusterOperationCallable = + callableFactory.createOperationCallable( + diagnoseClusterTransportSettings, + settings.diagnoseClusterOperationSettings(), + clientContext, + operationsStub); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public GrpcOperationsStub getOperationsStub() { return operationsStub; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public UnaryCallable createClusterCallable() { + return createClusterCallable; + } + public OperationCallable createClusterOperationCallable() { return createClusterOperationCallable; } - public UnaryCallable createClusterCallable() { - return createClusterCallable; + public UnaryCallable updateClusterCallable() { + return updateClusterCallable; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable updateClusterOperationCallable() { return updateClusterOperationCallable; } - public UnaryCallable updateClusterCallable() { - return updateClusterCallable; + public UnaryCallable deleteClusterCallable() { + return 
deleteClusterCallable; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable deleteClusterOperationCallable() { return deleteClusterOperationCallable; } - public UnaryCallable deleteClusterCallable() { - return deleteClusterCallable; - } - - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallable - diagnoseClusterOperationCallable() { - return diagnoseClusterOperationCallable; - } - - public UnaryCallable diagnoseClusterCallable() { - return diagnoseClusterCallable; - } - public UnaryCallable getClusterCallable() { return getClusterCallable; } + public UnaryCallable listClustersCallable() { + return listClustersCallable; + } + public UnaryCallable listClustersPagedCallable() { return listClustersPagedCallable; } - public UnaryCallable listClustersCallable() { - return listClustersCallable; + public UnaryCallable diagnoseClusterCallable() { + return diagnoseClusterCallable; + } + + public OperationCallable + diagnoseClusterOperationCallable() { + return diagnoseClusterOperationCallable; } @Override diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcJobControllerCallableFactory.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcJobControllerCallableFactory.java index daa86f95..285d96d3 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcJobControllerCallableFactory.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcJobControllerCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for Cloud Dataproc API. + * gRPC callable factory implementation for the JobController service API. * *
<p>
This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcJobControllerCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcJobControllerStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcJobControllerStub.java index 294bcfb4..9e410dfa 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcJobControllerStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcJobControllerStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
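The RequestParamsExtractor blocks added to each GrpcCallSettings above (and, below, to the job RPCs) populate the x-goog-request-params routing header from request fields, letting the service route by project, region, and resource name. Written out standalone with full generics, the getCluster extractor is:

    import com.google.api.gax.rpc.RequestParamsExtractor;
    import com.google.cloud.dataproc.v1.GetClusterRequest;
    import com.google.common.collect.ImmutableMap;
    import java.util.Map;

    RequestParamsExtractor<GetClusterRequest> getClusterParams =
        new RequestParamsExtractor<GetClusterRequest>() {
          @Override
          public Map<String, String> extract(GetClusterRequest request) {
            // Each entry becomes a key=value pair in the routing header.
            ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
            params.put("cluster_name", String.valueOf(request.getClusterName()));
            params.put("project_id", String.valueOf(request.getProjectId()));
            params.put("region", String.valueOf(request.getRegion()));
            return params.build();
          }
        };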
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,17 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.JobControllerClient.ListJobsPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dataproc.v1.CancelJobRequest; import com.google.cloud.dataproc.v1.DeleteJobRequest; @@ -34,25 +35,25 @@ import com.google.cloud.dataproc.v1.ListJobsResponse; import com.google.cloud.dataproc.v1.SubmitJobRequest; import com.google.cloud.dataproc.v1.UpdateJobRequest; +import com.google.common.collect.ImmutableMap; import com.google.longrunning.Operation; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; +import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for Cloud Dataproc API. + * gRPC stub implementation for the JobController service API. * *
<p>
This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcJobControllerStub extends JobControllerStub { - private static final MethodDescriptor submitJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -60,6 +61,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(SubmitJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance())) .build(); + private static final MethodDescriptor submitJobAsOperationMethodDescriptor = MethodDescriptor.newBuilder() @@ -68,6 +70,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(SubmitJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor getJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -75,6 +78,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(GetJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance())) .build(); + private static final MethodDescriptor listJobsMethodDescriptor = MethodDescriptor.newBuilder() @@ -83,6 +87,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(ListJobsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ListJobsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor updateJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -90,6 +95,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(UpdateJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance())) .build(); + private static final MethodDescriptor cancelJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -97,6 +103,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(CancelJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance())) .build(); + private static final MethodDescriptor deleteJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -105,9 +112,6 @@ public class GrpcJobControllerStub extends JobControllerStub { .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; - private final UnaryCallable submitJobCallable; private final UnaryCallable submitJobAsOperationCallable; private final OperationCallable @@ -119,6 +123,8 @@ public class GrpcJobControllerStub extends JobControllerStub { private final UnaryCallable cancelJobCallable; private final UnaryCallable deleteJobCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcJobControllerStub create(JobControllerStubSettings settings) @@ 
-162,30 +168,104 @@ protected GrpcJobControllerStub( GrpcCallSettings submitJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(submitJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(SubmitJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings submitJobAsOperationTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(submitJobAsOperationMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(SubmitJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings getJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(getJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(GetJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("job_id", String.valueOf(request.getJobId())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings listJobsTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(listJobsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(ListJobsRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings updateJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(updateJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(UpdateJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("job_id", String.valueOf(request.getJobId())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings cancelJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(cancelJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(CancelJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("job_id", String.valueOf(request.getJobId())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings deleteJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(deleteJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(DeleteJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("job_id", String.valueOf(request.getJobId())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + 
return params.build(); + } + }) .build(); this.submitJobCallable = @@ -201,7 +281,7 @@ protected GrpcJobControllerStub( submitJobAsOperationTransportSettings, settings.submitJobAsOperationOperationSettings(), clientContext, - this.operationsStub); + operationsStub); this.getJobCallable = callableFactory.createUnaryCallable( getJobTransportSettings, settings.getJobSettings(), clientContext); @@ -221,10 +301,10 @@ protected GrpcJobControllerStub( callableFactory.createUnaryCallable( deleteJobTransportSettings, settings.deleteJobSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public GrpcOperationsStub getOperationsStub() { return operationsStub; } @@ -233,28 +313,27 @@ public UnaryCallable submitJobCallable() { return submitJobCallable; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public UnaryCallable submitJobAsOperationCallable() { + return submitJobAsOperationCallable; + } + public OperationCallable submitJobAsOperationOperationCallable() { return submitJobAsOperationOperationCallable; } - public UnaryCallable submitJobAsOperationCallable() { - return submitJobAsOperationCallable; - } - public UnaryCallable getJobCallable() { return getJobCallable; } - public UnaryCallable listJobsPagedCallable() { - return listJobsPagedCallable; - } - public UnaryCallable listJobsCallable() { return listJobsCallable; } + public UnaryCallable listJobsPagedCallable() { + return listJobsPagedCallable; + } + public UnaryCallable updateJobCallable() { return updateJobCallable; } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceCallableFactory.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceCallableFactory.java index 242d2116..4da9fd42 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceCallableFactory.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for Cloud Dataproc API. 
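With submitJobAsOperation wired through GrpcOperationsStub, a job submission can be awaited as a long-running operation. A consumption sketch through the v1 client surface, assuming its submitJobAsOperationAsync(projectId, region, job) overload; the identifiers are placeholders and the Job spec is elided:

    import com.google.api.gax.longrunning.OperationFuture;
    import com.google.cloud.dataproc.v1.Job;
    import com.google.cloud.dataproc.v1.JobControllerClient;
    import com.google.cloud.dataproc.v1.JobMetadata;

    final class SubmitJobExample {
      static Job runJob(Job jobSpec) throws Exception {
        try (JobControllerClient client = JobControllerClient.create()) {
          OperationFuture<Job, JobMetadata> op =
              client.submitJobAsOperationAsync("my-project", "us-central1", jobSpec);
          return op.get(); // blocks until the Dataproc LRO completes
        }
      }
    }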
+ * gRPC callable factory implementation for the WorkflowTemplateService service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcWorkflowTemplateServiceCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceStub.java index 7fb984c3..4da9b503 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/GrpcWorkflowTemplateServiceStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
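The stub that follows declares one gRPC MethodDescriptor per RPC. As a self-contained reference, here is a descriptor for GetWorkflowTemplate built with the same pattern; the service and message names are taken from the diff, while the standalone class wrapper is illustrative:

import com.google.cloud.dataproc.v1.GetWorkflowTemplateRequest;
import com.google.cloud.dataproc.v1.WorkflowTemplate;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;

public class MethodDescriptorSketch {
  // Unary descriptor: full method name plus protobuf marshallers for both sides.
  static final MethodDescriptor<GetWorkflowTemplateRequest, WorkflowTemplate>
      GET_WORKFLOW_TEMPLATE =
          MethodDescriptor.<GetWorkflowTemplateRequest, WorkflowTemplate>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetWorkflowTemplateRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance()))
              .build();

  public static void main(String[] args) {
    System.out.println(GET_WORKFLOW_TEMPLATE.getFullMethodName());
  }
}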
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -47,15 +47,35 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for Cloud Dataproc API. + * gRPC stub implementation for the WorkflowTemplateService service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub { + private static final MethodDescriptor + createWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(CreateWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + getWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(GetWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) + .build(); private static final MethodDescriptor instantiateWorkflowTemplateMethodDescriptor = @@ -67,6 +87,7 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub ProtoUtils.marshaller(InstantiateWorkflowTemplateRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor instantiateInlineWorkflowTemplateMethodDescriptor = MethodDescriptor.newBuilder() @@ -78,26 +99,7 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub InstantiateInlineWorkflowTemplateRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); - private static final MethodDescriptor - createWorkflowTemplateMethodDescriptor = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName( - "google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate") - .setRequestMarshaller( - ProtoUtils.marshaller(CreateWorkflowTemplateRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) - .build(); - private static final MethodDescriptor - getWorkflowTemplateMethodDescriptor = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName( - "google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate") - .setRequestMarshaller( - ProtoUtils.marshaller(GetWorkflowTemplateRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) - .build(); + private static final MethodDescriptor updateWorkflowTemplateMethodDescriptor = MethodDescriptor.newBuilder() @@ -108,6 +110,7 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub ProtoUtils.marshaller(UpdateWorkflowTemplateRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) .build(); + private static final MethodDescriptor listWorkflowTemplatesMethodDescriptor = MethodDescriptor.newBuilder() @@ -119,6 +122,7 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub .setResponseMarshaller( 
ProtoUtils.marshaller(ListWorkflowTemplatesResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor deleteWorkflowTemplateMethodDescriptor = MethodDescriptor.newBuilder() @@ -130,9 +134,10 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; - + private final UnaryCallable + createWorkflowTemplateCallable; + private final UnaryCallable + getWorkflowTemplateCallable; private final UnaryCallable instantiateWorkflowTemplateCallable; private final OperationCallable @@ -141,10 +146,6 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub instantiateInlineWorkflowTemplateCallable; private final OperationCallable instantiateInlineWorkflowTemplateOperationCallable; - private final UnaryCallable - createWorkflowTemplateCallable; - private final UnaryCallable - getWorkflowTemplateCallable; private final UnaryCallable updateWorkflowTemplateCallable; private final UnaryCallable @@ -153,6 +154,8 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub listWorkflowTemplatesPagedCallable; private final UnaryCallable deleteWorkflowTemplateCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcWorkflowTemplateServiceStub create( @@ -196,6 +199,34 @@ protected GrpcWorkflowTemplateServiceStub( this.callableFactory = callableFactory; this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); + GrpcCallSettings + createWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createWorkflowTemplateMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(CreateWorkflowTemplateRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + } + }) + .build(); + GrpcCallSettings + getWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getWorkflowTemplateMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(GetWorkflowTemplateRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + } + }) + .build(); GrpcCallSettings instantiateWorkflowTemplateTransportSettings = GrpcCallSettings.newBuilder() @@ -226,34 +257,6 @@ public Map extract( } }) .build(); - GrpcCallSettings - createWorkflowTemplateTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(createWorkflowTemplateMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(CreateWorkflowTemplateRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("parent", String.valueOf(request.getParent())); - return params.build(); - } - }) - .build(); - GrpcCallSettings - getWorkflowTemplateTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(getWorkflowTemplateMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(GetWorkflowTemplateRequest request) { - ImmutableMap.Builder 
params = ImmutableMap.builder(); - params.put("name", String.valueOf(request.getName())); - return params.build(); - } - }) - .build(); GrpcCallSettings updateWorkflowTemplateTransportSettings = GrpcCallSettings.newBuilder() @@ -298,6 +301,16 @@ public Map extract(DeleteWorkflowTemplateRequest request) { }) .build(); + this.createWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + createWorkflowTemplateTransportSettings, + settings.createWorkflowTemplateSettings(), + clientContext); + this.getWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + getWorkflowTemplateTransportSettings, + settings.getWorkflowTemplateSettings(), + clientContext); this.instantiateWorkflowTemplateCallable = callableFactory.createUnaryCallable( instantiateWorkflowTemplateTransportSettings, @@ -308,7 +321,7 @@ public Map extract(DeleteWorkflowTemplateRequest request) { instantiateWorkflowTemplateTransportSettings, settings.instantiateWorkflowTemplateOperationSettings(), clientContext, - this.operationsStub); + operationsStub); this.instantiateInlineWorkflowTemplateCallable = callableFactory.createUnaryCallable( instantiateInlineWorkflowTemplateTransportSettings, @@ -319,17 +332,7 @@ public Map extract(DeleteWorkflowTemplateRequest request) { instantiateInlineWorkflowTemplateTransportSettings, settings.instantiateInlineWorkflowTemplateOperationSettings(), clientContext, - this.operationsStub); - this.createWorkflowTemplateCallable = - callableFactory.createUnaryCallable( - createWorkflowTemplateTransportSettings, - settings.createWorkflowTemplateSettings(), - clientContext); - this.getWorkflowTemplateCallable = - callableFactory.createUnaryCallable( - getWorkflowTemplateTransportSettings, - settings.getWorkflowTemplateSettings(), - clientContext); + operationsStub); this.updateWorkflowTemplateCallable = callableFactory.createUnaryCallable( updateWorkflowTemplateTransportSettings, @@ -351,18 +354,21 @@ public Map extract(DeleteWorkflowTemplateRequest request) { settings.deleteWorkflowTemplateSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public GrpcOperationsStub getOperationsStub() { return operationsStub; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallable - instantiateWorkflowTemplateOperationCallable() { - return instantiateWorkflowTemplateOperationCallable; + public UnaryCallable + createWorkflowTemplateCallable() { + return createWorkflowTemplateCallable; + } + + public UnaryCallable getWorkflowTemplateCallable() { + return getWorkflowTemplateCallable; } public UnaryCallable @@ -370,10 +376,9 @@ public GrpcOperationsStub getOperationsStub() { return instantiateWorkflowTemplateCallable; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallable - instantiateInlineWorkflowTemplateOperationCallable() { - return instantiateInlineWorkflowTemplateOperationCallable; + public OperationCallable + instantiateWorkflowTemplateOperationCallable() { + return instantiateWorkflowTemplateOperationCallable; } public UnaryCallable @@ -381,13 +386,9 @@ public GrpcOperationsStub getOperationsStub() { return instantiateInlineWorkflowTemplateCallable; } - public UnaryCallable - 
createWorkflowTemplateCallable() { - return createWorkflowTemplateCallable; - } - - public UnaryCallable getWorkflowTemplateCallable() { - return getWorkflowTemplateCallable; + public OperationCallable + instantiateInlineWorkflowTemplateOperationCallable() { + return instantiateInlineWorkflowTemplateOperationCallable; } public UnaryCallable @@ -395,16 +396,16 @@ public UnaryCallable getWorkflowTe return updateWorkflowTemplateCallable; } - public UnaryCallable - listWorkflowTemplatesPagedCallable() { - return listWorkflowTemplatesPagedCallable; - } - public UnaryCallable listWorkflowTemplatesCallable() { return listWorkflowTemplatesCallable; } + public UnaryCallable + listWorkflowTemplatesPagedCallable() { + return listWorkflowTemplatesPagedCallable; + } + public UnaryCallable deleteWorkflowTemplateCallable() { return deleteWorkflowTemplateCallable; } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStub.java index 45aab410..550b6d22 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.JobControllerClient.ListJobsPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -35,17 +35,15 @@ import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for Cloud Dataproc API. + * Base stub class for the JobController service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class JobControllerStub implements BackgroundResource { - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationsStub getOperationsStub() { throw new UnsupportedOperationException("Not implemented: getOperationsStub()"); } @@ -54,7 +52,6 @@ public UnaryCallable submitJobCallable() { throw new UnsupportedOperationException("Not implemented: submitJobCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable submitJobAsOperationOperationCallable() { throw new UnsupportedOperationException( diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStubSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStubSettings.java index 05965c1a..5790e2a4 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStubSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/JobControllerStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.JobControllerClient.ListJobsPagedResponse; @@ -63,7 +64,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link JobControllerStub}. * @@ -80,22 +81,23 @@ * *
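The submitJobAsOperation surface above is a long-running operation. A hedged sketch of typical client-level consumption (placeholder project and region; default credentials assumed; an empty Job is a placeholder, a real one needs a driver config):

import com.google.cloud.dataproc.v1.Job;
import com.google.cloud.dataproc.v1.JobControllerClient;

public class SubmitJobLroSketch {
  public static void main(String[] args) throws Exception {
    try (JobControllerClient client = JobControllerClient.create()) {
      Job job = Job.newBuilder().build(); // placeholder: set a Hadoop/Spark/etc. driver for real use
      // Returns an OperationFuture<Job, JobMetadata>; get() blocks until the
      // LRO completes, polling with the settings configured in these stub settings.
      Job done = client.submitJobAsOperationAsync("my-project", "us-central1", job).get();
      System.out.println(done.getStatus().getState());
    }
  }
}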

For example, to set the total timeout of submitJob to 30 seconds: * - * <pre>
- * <code>
+ * <pre>{@code
  * JobControllerStubSettings.Builder jobControllerSettingsBuilder =
  *     JobControllerStubSettings.newBuilder();
  * jobControllerSettingsBuilder
  *     .submitJobSettings()
  *     .setRetrySettings(
- *         jobControllerSettingsBuilder.submitJobSettings().getRetrySettings().toBuilder()
+ *         jobControllerSettingsBuilder
+ *             .submitJobSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * JobControllerStubSettings jobControllerSettings = jobControllerSettingsBuilder.build();
- * </code>
- * </pre>
+ * }</pre>
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class JobControllerStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -112,18 +114,69 @@ public class JobControllerStubSettings extends StubSettings cancelJobSettings; private final UnaryCallSettings deleteJobSettings; + private static final PagedListDescriptor + LIST_JOBS_PAGE_STR_DESC = + new PagedListDescriptor() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListJobsRequest injectToken(ListJobsRequest payload, String token) { + return ListJobsRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListJobsRequest injectPageSize(ListJobsRequest payload, int pageSize) { + return ListJobsRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListJobsRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListJobsResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListJobsResponse payload) { + return payload.getJobsList() == null + ? ImmutableList.of() + : payload.getJobsList(); + } + }; + + private static final PagedListResponseFactory< + ListJobsRequest, ListJobsResponse, ListJobsPagedResponse> + LIST_JOBS_PAGE_STR_FACT = + new PagedListResponseFactory() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListJobsRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext pageContext = + PageContext.create(callable, LIST_JOBS_PAGE_STR_DESC, request, context); + return ListJobsPagedResponse.createAsync(pageContext, futureResponse); + } + }; + /** Returns the object with the settings used for calls to submitJob. */ public UnaryCallSettings submitJobSettings() { return submitJobSettings; } - /** Returns the object with the settings used for calls to submitJobAsOperation. */ + /** Returns the object with the settings used for calls to submitJobAs. */ public UnaryCallSettings submitJobAsOperationSettings() { return submitJobAsOperationSettings; } /** Returns the object with the settings used for calls to submitJobAsOperation. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings submitJobAsOperationOperationSettings() { return submitJobAsOperationOperationSettings; @@ -161,10 +214,10 @@ public JobControllerStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcJobControllerStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. 
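The LIST_JOBS_PAGE_STR_DESC / LIST_JOBS_PAGE_STR_FACT pair above is what backs the auto-paginating listJobs surface. A typical consumption sketch (placeholder project and region; default credentials assumed):

import com.google.cloud.dataproc.v1.Job;
import com.google.cloud.dataproc.v1.JobControllerClient;

public class ListJobsPagingSketch {
  public static void main(String[] args) throws Exception {
    try (JobControllerClient client = JobControllerClient.create()) {
      // iterateAll() walks every page; page tokens are threaded through the
      // descriptor's injectToken/extractNextToken shown above.
      for (Job job : client.listJobs("my-project", "us-central1").iterateAll()) {
        System.out.println(job.getReference().getJobId());
      }
    }
  }
}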
*/ @@ -235,62 +288,9 @@ protected JobControllerStubSettings(Builder settingsBuilder) throws IOException deleteJobSettings = settingsBuilder.deleteJobSettings().build(); } - private static final PagedListDescriptor - LIST_JOBS_PAGE_STR_DESC = - new PagedListDescriptor() { - @Override - public String emptyToken() { - return ""; - } - - @Override - public ListJobsRequest injectToken(ListJobsRequest payload, String token) { - return ListJobsRequest.newBuilder(payload).setPageToken(token).build(); - } - - @Override - public ListJobsRequest injectPageSize(ListJobsRequest payload, int pageSize) { - return ListJobsRequest.newBuilder(payload).setPageSize(pageSize).build(); - } - - @Override - public Integer extractPageSize(ListJobsRequest payload) { - return payload.getPageSize(); - } - - @Override - public String extractNextToken(ListJobsResponse payload) { - return payload.getNextPageToken(); - } - - @Override - public Iterable extractResources(ListJobsResponse payload) { - return payload.getJobsList() != null - ? payload.getJobsList() - : ImmutableList.of(); - } - }; - - private static final PagedListResponseFactory< - ListJobsRequest, ListJobsResponse, ListJobsPagedResponse> - LIST_JOBS_PAGE_STR_FACT = - new PagedListResponseFactory() { - @Override - public ApiFuture getFuturePagedResponse( - UnaryCallable callable, - ListJobsRequest request, - ApiCallContext context, - ApiFuture futureResponse) { - PageContext pageContext = - PageContext.create(callable, LIST_JOBS_PAGE_STR_DESC, request, context); - return ListJobsPagedResponse.createAsync(pageContext, futureResponse); - } - }; - /** Builder for JobControllerStubSettings. */ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder submitJobSettings; private final UnaryCallSettings.Builder submitJobAsOperationSettings; @@ -303,7 +303,6 @@ public static class Builder extends StubSettings.Builder updateJobSettings; private final UnaryCallSettings.Builder cancelJobSettings; private final UnaryCallSettings.Builder deleteJobSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -311,27 +310,7 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_4_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.INTERNAL, - StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_6_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.INTERNAL, - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_3_codes", + "retry_policy_7_codes", ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( "retry_policy_2_codes", @@ -340,14 +319,6 @@ public static class Builder extends StubSettings.BuildernewArrayList())); - definitions.put( - "retry_policy_5_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_7_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -356,28 +327,6 @@ public static class Builder extends StubSettings.Builder 
definitions = ImmutableMap.builder(); RetrySettings settings = null; - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_1_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_3_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.ofMillis(100L)) @@ -388,18 +337,7 @@ public static class Builder extends StubSettings.Builder>of( + submitJobSettings, + submitJobAsOperationSettings, + getJobSettings, + listJobsSettings, + updateJobSettings, + cancelJobSettings, + deleteJobSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .submitJobSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_7_codes")) @@ -527,6 +450,7 @@ private static Builder initDefaults(Builder builder) { .deleteJobSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_7_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_7_params")); + builder .submitJobAsOperationOperationSettings() .setInitialCallSettings( @@ -540,43 +464,19 @@ private static Builder initDefaults(Builder builder) { .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(500L)) + .setInitialRetryDelay(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) - .setMaxRetryDelay(Duration.ofMillis(5000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setMaxRetryDelay(Duration.ofMillis(45000L)) + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(300000L)) .build())); return builder; } - protected Builder(JobControllerStubSettings settings) { - super(settings); - - submitJobSettings = settings.submitJobSettings.toBuilder(); - submitJobAsOperationSettings = settings.submitJobAsOperationSettings.toBuilder(); - submitJobAsOperationOperationSettings = - settings.submitJobAsOperationOperationSettings.toBuilder(); - getJobSettings = settings.getJobSettings.toBuilder(); - listJobsSettings = settings.listJobsSettings.toBuilder(); - updateJobSettings = settings.updateJobSettings.toBuilder(); - cancelJobSettings = settings.cancelJobSettings.toBuilder(); - deleteJobSettings = settings.deleteJobSettings.toBuilder(); - - unaryMethodSettingsBuilders = - 
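The retry_policy_*_params entries above, and the LRO polling algorithm whose delays change here from 500 ms initial / 5 s max to 5 s initial / 45 s max, are all RetrySettings instances. Building one by hand shows what each field controls; the values mirror the diff, the variable name is illustrative:

import com.google.api.gax.retrying.RetrySettings;
import org.threeten.bp.Duration;

public class RetrySettingsSketch {
  public static void main(String[] args) {
    RetrySettings retryPolicy7 =
        RetrySettings.newBuilder()
            .setInitialRetryDelay(Duration.ofMillis(100L)) // delay before the first retry
            .setRetryDelayMultiplier(1.3) // exponential backoff factor between retries
            .setMaxRetryDelay(Duration.ofMillis(60000L)) // backoff cap
            .setInitialRpcTimeout(Duration.ofMillis(600000L)) // per-attempt deadline
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeout(Duration.ofMillis(600000L))
            .setTotalTimeout(Duration.ofMillis(600000L)) // overall deadline across attempts
            .build();
    System.out.println(retryPolicy7.getTotalTimeout());
  }
}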
ImmutableList.>of( - submitJobSettings, - submitJobAsOperationSettings, - getJobSettings, - listJobsSettings, - updateJobSettings, - cancelJobSettings, - deleteJobSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -597,7 +497,7 @@ public UnaryCallSettings.Builder submitJobSettings() { return submitJobSettings; } - /** Returns the builder for the settings used for calls to submitJobAsOperation. */ + /** Returns the builder for the settings used for calls to submitJobAs. */ public UnaryCallSettings.Builder submitJobAsOperationSettings() { return submitJobAsOperationSettings; } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStub.java index f87e3a0e..685c0e26 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -36,22 +36,28 @@ import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for Cloud Dataproc API. + * Base stub class for the WorkflowTemplateService service API. * *
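applyToAllUnaryMethods, referenced just above, updates every unary method's settings in one pass instead of per method. A hedged sketch; the method exists on the generated Builder, the 60-second value is arbitrary:

import com.google.api.core.ApiFunction;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.dataproc.v1.stub.JobControllerStubSettings;
import org.threeten.bp.Duration;

public class ApplyToAllSketch {
  public static void main(String[] args) throws Exception {
    JobControllerStubSettings.Builder builder = JobControllerStubSettings.newBuilder();
    // Bump the total timeout of every unary RPC at once.
    builder.applyToAllUnaryMethods(
        new ApiFunction<UnaryCallSettings.Builder<?, ?>, Void>() {
          @Override
          public Void apply(UnaryCallSettings.Builder<?, ?> settings) {
            settings.setRetrySettings(
                settings.getRetrySettings().toBuilder()
                    .setTotalTimeout(Duration.ofSeconds(60))
                    .build());
            return null;
          }
        });
    JobControllerStubSettings built = builder.build();
    System.out.println(built.getJobSettings().getRetrySettings().getTotalTimeout());
  }
}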

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class WorkflowTemplateServiceStub implements BackgroundResource { - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationsStub getOperationsStub() { throw new UnsupportedOperationException("Not implemented: getOperationsStub()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public UnaryCallable + createWorkflowTemplateCallable() { + throw new UnsupportedOperationException("Not implemented: createWorkflowTemplateCallable()"); + } + + public UnaryCallable getWorkflowTemplateCallable() { + throw new UnsupportedOperationException("Not implemented: getWorkflowTemplateCallable()"); + } + public OperationCallable instantiateWorkflowTemplateOperationCallable() { throw new UnsupportedOperationException( @@ -64,7 +70,6 @@ public OperationsStub getOperationsStub() { "Not implemented: instantiateWorkflowTemplateCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable instantiateInlineWorkflowTemplateOperationCallable() { throw new UnsupportedOperationException( @@ -77,15 +82,6 @@ public OperationsStub getOperationsStub() { "Not implemented: instantiateInlineWorkflowTemplateCallable()"); } - public UnaryCallable - createWorkflowTemplateCallable() { - throw new UnsupportedOperationException("Not implemented: createWorkflowTemplateCallable()"); - } - - public UnaryCallable getWorkflowTemplateCallable() { - throw new UnsupportedOperationException("Not implemented: getWorkflowTemplateCallable()"); - } - public UnaryCallable updateWorkflowTemplateCallable() { throw new UnsupportedOperationException("Not implemented: updateWorkflowTemplateCallable()"); diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStubSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStubSettings.java index 33176458..1ec3503f 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStubSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/WorkflowTemplateServiceStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1.stub; import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; @@ -64,7 +65,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link WorkflowTemplateServiceStub}. * @@ -81,28 +82,34 @@ * *

For example, to set the total timeout of createWorkflowTemplate to 30 seconds: * - * <pre>
- * <code>
+ * <pre>{@code
  * WorkflowTemplateServiceStubSettings.Builder workflowTemplateServiceSettingsBuilder =
  *     WorkflowTemplateServiceStubSettings.newBuilder();
  * workflowTemplateServiceSettingsBuilder
  *     .createWorkflowTemplateSettings()
  *     .setRetrySettings(
- *         workflowTemplateServiceSettingsBuilder.createWorkflowTemplateSettings().getRetrySettings().toBuilder()
+ *         workflowTemplateServiceSettingsBuilder
+ *             .createWorkflowTemplateSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * WorkflowTemplateServiceStubSettings workflowTemplateServiceSettings = workflowTemplateServiceSettingsBuilder.build();
- * </code>
- * </pre>
+ * WorkflowTemplateServiceStubSettings workflowTemplateServiceSettings =
+ *     workflowTemplateServiceSettingsBuilder.build();
+ * }</pre>
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class WorkflowTemplateServiceStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = ImmutableList.builder().add("https://www.googleapis.com/auth/cloud-platform").build(); + private final UnaryCallSettings + createWorkflowTemplateSettings; + private final UnaryCallSettings + getWorkflowTemplateSettings; private final UnaryCallSettings instantiateWorkflowTemplateSettings; private final OperationCallSettings @@ -112,10 +119,6 @@ public class WorkflowTemplateServiceStubSettings private final OperationCallSettings< InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateOperationSettings; - private final UnaryCallSettings - createWorkflowTemplateSettings; - private final UnaryCallSettings - getWorkflowTemplateSettings; private final UnaryCallSettings updateWorkflowTemplateSettings; private final PagedCallSettings< @@ -126,6 +129,83 @@ public class WorkflowTemplateServiceStubSettings private final UnaryCallSettings deleteWorkflowTemplateSettings; + private static final PagedListDescriptor< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> + LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC = + new PagedListDescriptor< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate>() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListWorkflowTemplatesRequest injectToken( + ListWorkflowTemplatesRequest payload, String token) { + return ListWorkflowTemplatesRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListWorkflowTemplatesRequest injectPageSize( + ListWorkflowTemplatesRequest payload, int pageSize) { + return ListWorkflowTemplatesRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListWorkflowTemplatesRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListWorkflowTemplatesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListWorkflowTemplatesResponse payload) { + return payload.getTemplatesList() == null + ? ImmutableList.of() + : payload.getTemplatesList(); + } + }; + + private static final PagedListResponseFactory< + ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + LIST_WORKFLOW_TEMPLATES_PAGE_STR_FACT = + new PagedListResponseFactory< + ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListWorkflowTemplatesRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> + pageContext = + PageContext.create( + callable, LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC, request, context); + return ListWorkflowTemplatesPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + /** Returns the object with the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings + createWorkflowTemplateSettings() { + return createWorkflowTemplateSettings; + } + + /** Returns the object with the settings used for calls to getWorkflowTemplate. 
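The workflow-template page descriptor and factory above back the client's auto-paginating listWorkflowTemplates in the same way as the jobs variant. A usage sketch (placeholder parent; default credentials assumed):

import com.google.cloud.dataproc.v1.WorkflowTemplate;
import com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient;

public class ListTemplatesPagingSketch {
  public static void main(String[] args) throws Exception {
    try (WorkflowTemplateServiceClient client = WorkflowTemplateServiceClient.create()) {
      String parent = "projects/my-project/regions/us-central1"; // placeholder parent
      for (WorkflowTemplate t : client.listWorkflowTemplates(parent).iterateAll()) {
        System.out.println(t.getName());
      }
    }
  }
}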
*/ + public UnaryCallSettings + getWorkflowTemplateSettings() { + return getWorkflowTemplateSettings; + } + /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ public UnaryCallSettings instantiateWorkflowTemplateSettings() { @@ -133,7 +213,6 @@ public class WorkflowTemplateServiceStubSettings } /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings instantiateWorkflowTemplateOperationSettings() { return instantiateWorkflowTemplateOperationSettings; @@ -146,24 +225,11 @@ public class WorkflowTemplateServiceStubSettings } /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings instantiateInlineWorkflowTemplateOperationSettings() { return instantiateInlineWorkflowTemplateOperationSettings; } - /** Returns the object with the settings used for calls to createWorkflowTemplate. */ - public UnaryCallSettings - createWorkflowTemplateSettings() { - return createWorkflowTemplateSettings; - } - - /** Returns the object with the settings used for calls to getWorkflowTemplate. */ - public UnaryCallSettings - getWorkflowTemplateSettings() { - return getWorkflowTemplateSettings; - } - /** Returns the object with the settings used for calls to updateWorkflowTemplate. */ public UnaryCallSettings updateWorkflowTemplateSettings() { @@ -190,10 +256,10 @@ public WorkflowTemplateServiceStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcWorkflowTemplateServiceStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. 
*/ @@ -253,6 +319,8 @@ public Builder toBuilder() { protected WorkflowTemplateServiceStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); + createWorkflowTemplateSettings = settingsBuilder.createWorkflowTemplateSettings().build(); + getWorkflowTemplateSettings = settingsBuilder.getWorkflowTemplateSettings().build(); instantiateWorkflowTemplateSettings = settingsBuilder.instantiateWorkflowTemplateSettings().build(); instantiateWorkflowTemplateOperationSettings = @@ -261,83 +329,19 @@ protected WorkflowTemplateServiceStubSettings(Builder settingsBuilder) throws IO settingsBuilder.instantiateInlineWorkflowTemplateSettings().build(); instantiateInlineWorkflowTemplateOperationSettings = settingsBuilder.instantiateInlineWorkflowTemplateOperationSettings().build(); - createWorkflowTemplateSettings = settingsBuilder.createWorkflowTemplateSettings().build(); - getWorkflowTemplateSettings = settingsBuilder.getWorkflowTemplateSettings().build(); updateWorkflowTemplateSettings = settingsBuilder.updateWorkflowTemplateSettings().build(); listWorkflowTemplatesSettings = settingsBuilder.listWorkflowTemplatesSettings().build(); deleteWorkflowTemplateSettings = settingsBuilder.deleteWorkflowTemplateSettings().build(); } - private static final PagedListDescriptor< - ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> - LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC = - new PagedListDescriptor< - ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate>() { - @Override - public String emptyToken() { - return ""; - } - - @Override - public ListWorkflowTemplatesRequest injectToken( - ListWorkflowTemplatesRequest payload, String token) { - return ListWorkflowTemplatesRequest.newBuilder(payload).setPageToken(token).build(); - } - - @Override - public ListWorkflowTemplatesRequest injectPageSize( - ListWorkflowTemplatesRequest payload, int pageSize) { - return ListWorkflowTemplatesRequest.newBuilder(payload).setPageSize(pageSize).build(); - } - - @Override - public Integer extractPageSize(ListWorkflowTemplatesRequest payload) { - return payload.getPageSize(); - } - - @Override - public String extractNextToken(ListWorkflowTemplatesResponse payload) { - return payload.getNextPageToken(); - } - - @Override - public Iterable extractResources( - ListWorkflowTemplatesResponse payload) { - return payload.getTemplatesList() != null - ? payload.getTemplatesList() - : ImmutableList.of(); - } - }; - - private static final PagedListResponseFactory< - ListWorkflowTemplatesRequest, - ListWorkflowTemplatesResponse, - ListWorkflowTemplatesPagedResponse> - LIST_WORKFLOW_TEMPLATES_PAGE_STR_FACT = - new PagedListResponseFactory< - ListWorkflowTemplatesRequest, - ListWorkflowTemplatesResponse, - ListWorkflowTemplatesPagedResponse>() { - @Override - public ApiFuture getFuturePagedResponse( - UnaryCallable callable, - ListWorkflowTemplatesRequest request, - ApiCallContext context, - ApiFuture futureResponse) { - PageContext< - ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> - pageContext = - PageContext.create( - callable, LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC, request, context); - return ListWorkflowTemplatesPagedResponse.createAsync(pageContext, futureResponse); - } - }; - /** Builder for WorkflowTemplateServiceStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - + private final UnaryCallSettings.Builder + createWorkflowTemplateSettings; + private final UnaryCallSettings.Builder + getWorkflowTemplateSettings; private final UnaryCallSettings.Builder instantiateWorkflowTemplateSettings; private final OperationCallSettings.Builder< @@ -348,10 +352,6 @@ public static class Builder private final OperationCallSettings.Builder< InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateOperationSettings; - private final UnaryCallSettings.Builder - createWorkflowTemplateSettings; - private final UnaryCallSettings.Builder - getWorkflowTemplateSettings; private final UnaryCallSettings.Builder updateWorkflowTemplateSettings; private final PagedCallSettings.Builder< @@ -361,13 +361,15 @@ public static class Builder listWorkflowTemplatesSettings; private final UnaryCallSettings.Builder deleteWorkflowTemplateSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "retry_policy_3_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( "retry_policy_4_codes", ImmutableSet.copyOf( @@ -375,10 +377,6 @@ public static class Builder StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.INTERNAL, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_3_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -409,80 +407,98 @@ public static class Builder .setTotalTimeout(Duration.ofMillis(600000L)) .build(); definitions.put("retry_policy_4_params", settings); - settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build(); - definitions.put("no_retry_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); + createWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + getWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); instantiateWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - instantiateWorkflowTemplateOperationSettings = OperationCallSettings.newBuilder(); - instantiateInlineWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - instantiateInlineWorkflowTemplateOperationSettings = OperationCallSettings.newBuilder(); - - createWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - - getWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - updateWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - listWorkflowTemplatesSettings = PagedCallSettings.newBuilder(LIST_WORKFLOW_TEMPLATES_PAGE_STR_FACT); - deleteWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.>of( + createWorkflowTemplateSettings, + getWorkflowTemplateSettings, instantiateWorkflowTemplateSettings, instantiateInlineWorkflowTemplateSettings, + updateWorkflowTemplateSettings, + listWorkflowTemplatesSettings, + deleteWorkflowTemplateSettings); + initDefaults(this); + } + + protected 
Builder(WorkflowTemplateServiceStubSettings settings) { + super(settings); + + createWorkflowTemplateSettings = settings.createWorkflowTemplateSettings.toBuilder(); + getWorkflowTemplateSettings = settings.getWorkflowTemplateSettings.toBuilder(); + instantiateWorkflowTemplateSettings = + settings.instantiateWorkflowTemplateSettings.toBuilder(); + instantiateWorkflowTemplateOperationSettings = + settings.instantiateWorkflowTemplateOperationSettings.toBuilder(); + instantiateInlineWorkflowTemplateSettings = + settings.instantiateInlineWorkflowTemplateSettings.toBuilder(); + instantiateInlineWorkflowTemplateOperationSettings = + settings.instantiateInlineWorkflowTemplateOperationSettings.toBuilder(); + updateWorkflowTemplateSettings = settings.updateWorkflowTemplateSettings.toBuilder(); + listWorkflowTemplatesSettings = settings.listWorkflowTemplatesSettings.toBuilder(); + deleteWorkflowTemplateSettings = settings.deleteWorkflowTemplateSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( createWorkflowTemplateSettings, getWorkflowTemplateSettings, + instantiateWorkflowTemplateSettings, + instantiateInlineWorkflowTemplateSettings, updateWorkflowTemplateSettings, listWorkflowTemplatesSettings, deleteWorkflowTemplateSettings); - - initDefaults(this); } private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder - .instantiateWorkflowTemplateSettings() + .createWorkflowTemplateSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); builder - .instantiateInlineWorkflowTemplateSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .getWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_4_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_4_params")); builder - .createWorkflowTemplateSettings() + .instantiateWorkflowTemplateSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); builder - .getWorkflowTemplateSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_4_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_4_params")); + .instantiateInlineWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); builder .updateWorkflowTemplateSettings() @@ -498,6 +514,7 @@ private static Builder initDefaults(Builder builder) { .deleteWorkflowTemplateSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + builder .instantiateWorkflowTemplateOperationSettings() .setInitialCallSettings( @@ -517,11 +534,12 @@ private static Builder initDefaults(Builder builder) { 
.setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(43200000L)) .build())); + builder .instantiateInlineWorkflowTemplateOperationSettings() .setInitialCallSettings( @@ -541,44 +559,16 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(43200000L)) .build())); return builder; } - protected Builder(WorkflowTemplateServiceStubSettings settings) { - super(settings); - - instantiateWorkflowTemplateSettings = - settings.instantiateWorkflowTemplateSettings.toBuilder(); - instantiateWorkflowTemplateOperationSettings = - settings.instantiateWorkflowTemplateOperationSettings.toBuilder(); - instantiateInlineWorkflowTemplateSettings = - settings.instantiateInlineWorkflowTemplateSettings.toBuilder(); - instantiateInlineWorkflowTemplateOperationSettings = - settings.instantiateInlineWorkflowTemplateOperationSettings.toBuilder(); - createWorkflowTemplateSettings = settings.createWorkflowTemplateSettings.toBuilder(); - getWorkflowTemplateSettings = settings.getWorkflowTemplateSettings.toBuilder(); - updateWorkflowTemplateSettings = settings.updateWorkflowTemplateSettings.toBuilder(); - listWorkflowTemplatesSettings = settings.listWorkflowTemplatesSettings.toBuilder(); - deleteWorkflowTemplateSettings = settings.deleteWorkflowTemplateSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - instantiateWorkflowTemplateSettings, - instantiateInlineWorkflowTemplateSettings, - createWorkflowTemplateSettings, - getWorkflowTemplateSettings, - updateWorkflowTemplateSettings, - listWorkflowTemplatesSettings, - deleteWorkflowTemplateSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -594,6 +584,18 @@ public Builder applyToAllUnaryMethods( return unaryMethodSettingsBuilders; } + /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings.Builder + createWorkflowTemplateSettings() { + return createWorkflowTemplateSettings; + } + + /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings.Builder + getWorkflowTemplateSettings() { + return getWorkflowTemplateSettings; + } + /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ public UnaryCallSettings.Builder instantiateWorkflowTemplateSettings() { @@ -624,18 +626,6 @@ public Builder applyToAllUnaryMethods( return instantiateInlineWorkflowTemplateOperationSettings; } - /** Returns the builder for the settings used for calls to createWorkflowTemplate. 
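The 43200000 ms total timeout above gives the instantiate LROs a 12-hour polling budget. Typical consumption blocks on the returned OperationFuture (placeholder resource name; default credentials assumed):

import com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient;

public class InstantiateTemplateSketch {
  public static void main(String[] args) throws Exception {
    try (WorkflowTemplateServiceClient client = WorkflowTemplateServiceClient.create()) {
      String name =
          "projects/my-project/regions/us-central1/workflowTemplates/my-template"; // placeholder
      // OperationFuture<Empty, WorkflowMetadata>: the response type is Empty, so
      // get() is only a completion barrier; progress lives in WorkflowMetadata.
      client.instantiateWorkflowTemplateAsync(name).get();
      System.out.println("workflow finished");
    }
  }
}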
*/ - public UnaryCallSettings.Builder - createWorkflowTemplateSettings() { - return createWorkflowTemplateSettings; - } - - /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ - public UnaryCallSettings.Builder - getWorkflowTemplateSettings() { - return getWorkflowTemplateSettings; - } - /** Returns the builder for the settings used for calls to updateWorkflowTemplate. */ public UnaryCallSettings.Builder updateWorkflowTemplateSettings() { diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClient.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClient.java index 4c7f769c..56678fac 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClient.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.ApiFunction; @@ -34,7 +35,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: The API interface for managing autoscaling policies in the Cloud Dataproc * API. @@ -42,17 +43,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * <pre>
- * <code>
- * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
- *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
- *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
- * }
- * </code>
- * </pre>
- *
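The block removed above was the old inline class sample; the regenerated Javadoc defers to the GitHub samples instead. As a minimal sketch of the equivalent call against the new surface (placeholder resource values, v1beta2 imports assumed):

    // Sketch only: try-with-resources closes the client and its threads automatically.
    try (AutoscalingPolicyServiceClient client = AutoscalingPolicyServiceClient.create()) {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
      AutoscalingPolicy response = client.createAutoscalingPolicy(parent, policy);
    }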

Note: close() needs to be called on the autoscalingPolicyServiceClient object to clean up + *

Note: close() needs to be called on the AutoscalingPolicyServiceClient object to clean up * resources such as threads. In the example above, try-with-resources is used, which automatically * calls close(). * @@ -81,30 +72,28 @@ * *

To customize credentials: * - *

- * <pre>
- * <code>
+ * <pre>{@code
  * AutoscalingPolicyServiceSettings autoscalingPolicyServiceSettings =
  *     AutoscalingPolicyServiceSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * AutoscalingPolicyServiceClient autoscalingPolicyServiceClient =
  *     AutoscalingPolicyServiceClient.create(autoscalingPolicyServiceSettings);
- * </code>
- * </pre>
+ * }</pre>
 *
- * To customize the endpoint:
+ * <p>To customize the endpoint:
 *
- * <pre>
- * <code>
+ * <pre>{@code
  * AutoscalingPolicyServiceSettings autoscalingPolicyServiceSettings =
  *     AutoscalingPolicyServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
  * AutoscalingPolicyServiceClient autoscalingPolicyServiceClient =
  *     AutoscalingPolicyServiceClient.create(autoscalingPolicyServiceSettings);
- * </code>
- * </pre>
+ * }</pre>
+ *
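Both snippets above configure the same settings builder, so credentials and endpoint can presumably be set together before creating the client; a hedged sketch (myCredentials and myEndpoint are placeholders):

    AutoscalingPolicyServiceSettings settings =
        AutoscalingPolicyServiceSettings.newBuilder()
            .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
            .setEndpoint(myEndpoint)
            .build();
    AutoscalingPolicyServiceClient client = AutoscalingPolicyServiceClient.create(settings);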
Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class AutoscalingPolicyServiceClient implements BackgroundResource { private final AutoscalingPolicyServiceSettings settings; private final AutoscalingPolicyServiceStub stub; @@ -126,7 +115,7 @@ public static final AutoscalingPolicyServiceClient create( /** * Constructs an instance of AutoscalingPolicyServiceClient, using the given stub for making - * calls. This is for advanced usage - prefer to use AutoscalingPolicyServiceSettings}. + * calls. This is for advanced usage - prefer using create(AutoscalingPolicyServiceSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final AutoscalingPolicyServiceClient create(AutoscalingPolicyServiceStub stub) { @@ -159,31 +148,26 @@ public AutoscalingPolicyServiceStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
-   * }
-   * </code></pre>
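This hunk reorders the RegionName and LocationName overloads of createAutoscalingPolicy; both parent types remain valid. A sketch of the two spellings (placeholder values):

    // Both name helpers flatten to the documented `projects/{project_id}/...` parent string.
    AutoscalingPolicy fromRegion =
        autoscalingPolicyServiceClient.createAutoscalingPolicy(
            RegionName.of("[PROJECT]", "[REGION]"), policy);
    AutoscalingPolicy fromLocation =
        autoscalingPolicyServiceClient.createAutoscalingPolicy(
            LocationName.of("[PROJECT]", "[LOCATION]"), policy);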
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.create`, the resource name has the - * following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.autoscalingPolicies.create`, the resource name has the - * following format: `projects/{project_id}/locations/{location}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.create`, the resource name has the
+ *           following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.autoscalingPolicies.create`, the resource name has the
+ *           following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @param policy Required. The autoscaling policy to create. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final AutoscalingPolicy createAutoscalingPolicy( - RegionName parent, AutoscalingPolicy policy) { + LocationName parent, AutoscalingPolicy policy) { CreateAutoscalingPolicyRequest request = CreateAutoscalingPolicyRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -192,31 +176,26 @@ public final AutoscalingPolicy createAutoscalingPolicy( return createAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.create`, the resource name has the - * following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.autoscalingPolicies.create`, the resource name has the - * following format: `projects/{project_id}/locations/{location}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.create`, the resource name has the
+ *           following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.autoscalingPolicies.create`, the resource name has the
+ *           following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @param policy Required. The autoscaling policy to create. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final AutoscalingPolicy createAutoscalingPolicy( - LocationName parent, AutoscalingPolicy policy) { + RegionName parent, AutoscalingPolicy policy) { CreateAutoscalingPolicyRequest request = CreateAutoscalingPolicyRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -225,26 +204,21 @@ public final AutoscalingPolicy createAutoscalingPolicy( return createAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent.toString(), policy);
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.create`, the resource name has the - * following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.autoscalingPolicies.create`, the resource name has the - * following format: `projects/{project_id}/locations/{location}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.create`, the resource name has the
+ *           following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.autoscalingPolicies.create`, the resource name has the
+ *           following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @param policy Required. The autoscaling policy to create. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -254,24 +228,10 @@ public final AutoscalingPolicy createAutoscalingPolicy(String parent, Autoscalin return createAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   CreateAutoscalingPolicyRequest request = CreateAutoscalingPolicyRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setPolicy(policy)
-   *     .build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -279,46 +239,23 @@ public final AutoscalingPolicy createAutoscalingPolicy(CreateAutoscalingPolicyRe return createAutoscalingPolicyCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates new autoscaling policy. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   CreateAutoscalingPolicyRequest request = CreateAutoscalingPolicyRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setPolicy(policy)
-   *     .build();
-   *   ApiFuture<AutoscalingPolicy> future = autoscalingPolicyServiceClient.createAutoscalingPolicyCallable().futureCall(request);
-   *   // Do something
-   *   AutoscalingPolicy response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createAutoscalingPolicyCallable() { return stub.createAutoscalingPolicyCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) autoscaling policy. * *

Disabled check for update_mask, because all updates will be full replacements. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.updateAutoscalingPolicy(policy);
-   * }
-   * 
- * * @param policy Required. The updated autoscaling policy. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -328,24 +265,12 @@ public final AutoscalingPolicy updateAutoscalingPolicy(AutoscalingPolicy policy) return updateAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) autoscaling policy. * *

Disabled check for update_mask, because all updates will be full replacements. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   UpdateAutoscalingPolicyRequest request = UpdateAutoscalingPolicyRequest.newBuilder()
-   *     .setPolicy(policy)
-   *     .build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.updateAutoscalingPolicy(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -353,52 +278,36 @@ public final AutoscalingPolicy updateAutoscalingPolicy(UpdateAutoscalingPolicyRe return updateAutoscalingPolicyCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) autoscaling policy. * *

Disabled check for update_mask, because all updates will be full replacements. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
-   *   UpdateAutoscalingPolicyRequest request = UpdateAutoscalingPolicyRequest.newBuilder()
-   *     .setPolicy(policy)
-   *     .build();
-   *   ApiFuture<AutoscalingPolicy> future = autoscalingPolicyServiceClient.updateAutoscalingPolicyCallable().futureCall(request);
-   *   // Do something
-   *   AutoscalingPolicy response = future.get();
-   * }
-   * 
*/ public final UnaryCallable updateAutoscalingPolicyCallable() { return stub.updateAutoscalingPolicyCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.getAutoscalingPolicy(name);
-   * }
-   * 
- * * @param name Required. The "resource name" of the autoscaling policy, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.get`, the resource name of the policy - * has the following format: - * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - *

* For `projects.locations.autoscalingPolicies.get`, the resource name of the policy - * has the following format: - * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.get`, the resource name of the policy has
+ *           the following format:
+ *           `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
+ *       <li>For `projects.locations.autoscalingPolicies.get`, the resource name of the policy
+ *           has the following format:
+ *           `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final AutoscalingPolicy getAutoscalingPolicy(AutoscalingPolicyName name) { @@ -409,27 +318,23 @@ public final AutoscalingPolicy getAutoscalingPolicy(AutoscalingPolicyName name) return getAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.getAutoscalingPolicy(name.toString());
-   * }
-   * 
- * * @param name Required. The "resource name" of the autoscaling policy, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.get`, the resource name of the policy - * has the following format: - * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - *

* For `projects.locations.autoscalingPolicies.get`, the resource name of the policy - * has the following format: - * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.get`, the resource name of the policy has
+ *           the following format:
+ *           `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
+ *       <li>For `projects.locations.autoscalingPolicies.get`, the resource name of the policy
+ *           has the following format:
+ *           `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final AutoscalingPolicy getAutoscalingPolicy(String name) { @@ -438,22 +343,10 @@ public final AutoscalingPolicy getAutoscalingPolicy(String name) { return getAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves autoscaling policy. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   GetAutoscalingPolicyRequest request = GetAutoscalingPolicyRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   AutoscalingPolicy response = autoscalingPolicyServiceClient.getAutoscalingPolicy(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -461,53 +354,35 @@ public final AutoscalingPolicy getAutoscalingPolicy(GetAutoscalingPolicyRequest return getAutoscalingPolicyCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Retrieves autoscaling policy. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   GetAutoscalingPolicyRequest request = GetAutoscalingPolicyRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<AutoscalingPolicy> future = autoscalingPolicyServiceClient.getAutoscalingPolicyCallable().futureCall(request);
-   *   // Do something
-   *   AutoscalingPolicy response = future.get();
-   * }
-   * 
*/ public final UnaryCallable getAutoscalingPolicyCallable() { return stub.getAutoscalingPolicyCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (AutoscalingPolicy element : autoscalingPolicyServiceClient.listAutoscalingPolicies(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.list`, the resource name of the region - * has the following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.autoscalingPolicies.list`, the resource name of the - * location has the following format: `projects/{project_id}/locations/{location}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.list`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.autoscalingPolicies.list`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(RegionName parent) { + public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(LocationName parent) { ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -515,30 +390,24 @@ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(Region return listAutoscalingPolicies(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   for (AutoscalingPolicy element : autoscalingPolicyServiceClient.listAutoscalingPolicies(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.list`, the resource name of the region - * has the following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.autoscalingPolicies.list`, the resource name of the - * location has the following format: `projects/{project_id}/locations/{location}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.list`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.autoscalingPolicies.list`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(LocationName parent) { + public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(RegionName parent) { ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -546,27 +415,21 @@ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(Locati return listAutoscalingPolicies(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (AutoscalingPolicy element : autoscalingPolicyServiceClient.listAutoscalingPolicies(parent.toString()).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The "resource name" of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.list`, the resource name of the region - * has the following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.autoscalingPolicies.list`, the resource name of the - * location has the following format: `projects/{project_id}/locations/{location}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.list`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.autoscalingPolicies.list`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(String parent) { @@ -575,24 +438,10 @@ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies(String return listAutoscalingPolicies(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   for (AutoscalingPolicy element : autoscalingPolicyServiceClient.listAutoscalingPolicies(request).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -601,85 +450,46 @@ public final ListAutoscalingPoliciesPagedResponse listAutoscalingPolicies( return listAutoscalingPoliciesPagedCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   ApiFuture<ListAutoscalingPoliciesPagedResponse> future = autoscalingPolicyServiceClient.listAutoscalingPoliciesPagedCallable().futureCall(request);
-   *   // Do something
-   *   for (AutoscalingPolicy element : future.get().iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
*/ public final UnaryCallable listAutoscalingPoliciesPagedCallable() { return stub.listAutoscalingPoliciesPagedCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists autoscaling policies in the project. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListAutoscalingPoliciesRequest request = ListAutoscalingPoliciesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   while (true) {
-   *     ListAutoscalingPoliciesResponse response = autoscalingPolicyServiceClient.listAutoscalingPoliciesCallable().call(request);
-   *     for (AutoscalingPolicy element : response.getPoliciesList()) {
-   *       // doThingsWith(element);
-   *     }
-   *     String nextPageToken = response.getNextPageToken();
-   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
-   *       request = request.toBuilder().setPageToken(nextPageToken).build();
-   *     } else {
-   *       break;
-   *     }
-   *   }
-   * }
-   * </code></pre>
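The removed loop above pages through results by hand; the paged response shown earlier does the same work via iterateAll(), which follows nextPageToken on demand. A minimal sketch:

    for (AutoscalingPolicy element :
        autoscalingPolicyServiceClient.listAutoscalingPolicies(parent).iterateAll()) {
      // doThingsWith(element);
    }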
*/ public final UnaryCallable listAutoscalingPoliciesCallable() { return stub.listAutoscalingPoliciesCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes an autoscaling policy. It is an error to delete an autoscaling policy that is in use by * one or more clusters. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   autoscalingPolicyServiceClient.deleteAutoscalingPolicy(name);
-   * }
-   * 
- * * @param name Required. The "resource name" of the autoscaling policy, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.delete`, the resource name of the policy - * has the following format: - * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - *

* For `projects.locations.autoscalingPolicies.delete`, the resource name of the - * policy has the following format: - * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.delete`, the resource name of the policy
+ *           has the following format:
+ *           `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
+ *       <li>For `projects.locations.autoscalingPolicies.delete`, the resource name of the policy
+ *           has the following format:
+ *           `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteAutoscalingPolicy(AutoscalingPolicyName name) { @@ -690,28 +500,24 @@ public final void deleteAutoscalingPolicy(AutoscalingPolicyName name) { deleteAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes an autoscaling policy. It is an error to delete an autoscaling policy that is in use by * one or more clusters. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   autoscalingPolicyServiceClient.deleteAutoscalingPolicy(name.toString());
-   * }
-   * </code></pre>
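For reference, the name accepted by these delete overloads can be built with the helper from the removed sample; the String overload takes its flattened form. A sketch (placeholder values):

    AutoscalingPolicyName name =
        AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName(
            "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
    // name.toString() yields projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}
    autoscalingPolicyServiceClient.deleteAutoscalingPolicy(name.toString());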
- * * @param name Required. The "resource name" of the autoscaling policy, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.autoscalingPolicies.delete`, the resource name of the policy - * has the following format: - * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - *

* For `projects.locations.autoscalingPolicies.delete`, the resource name of the - * policy has the following format: - * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + *

+ *     <ul>
+ *       <li>For `projects.regions.autoscalingPolicies.delete`, the resource name of the policy
+ *           has the following format:
+ *           `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`
+ *       <li>For `projects.locations.autoscalingPolicies.delete`, the resource name of the policy
+ *           has the following format:
+ *           `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteAutoscalingPolicy(String name) { @@ -720,23 +526,11 @@ public final void deleteAutoscalingPolicy(String name) { deleteAutoscalingPolicy(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes an autoscaling policy. It is an error to delete an autoscaling policy that is in use by * one or more clusters. * - *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   DeleteAutoscalingPolicyRequest request = DeleteAutoscalingPolicyRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   autoscalingPolicyServiceClient.deleteAutoscalingPolicy(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -744,24 +538,12 @@ public final void deleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest request deleteAutoscalingPolicyCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes an autoscaling policy. It is an error to delete an autoscaling policy that is in use by * one or more clusters. * *

Sample code: - * - *


-   * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
-   *   AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName("[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]");
-   *   DeleteAutoscalingPolicyRequest request = DeleteAutoscalingPolicyRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<Void> future = autoscalingPolicyServiceClient.deleteAutoscalingPolicyCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
*/ public final UnaryCallable deleteAutoscalingPolicyCallable() { diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceSettings.java index bba18dee..bf828d37 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import static com.google.cloud.dataproc.v1beta2.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; @@ -34,7 +35,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link AutoscalingPolicyServiceClient}. * @@ -51,24 +52,26 @@ * *

For example, to set the total timeout of createAutoscalingPolicy to 30 seconds: * - *

- * <pre>
- * <code>
+ * <pre>{@code
  * AutoscalingPolicyServiceSettings.Builder autoscalingPolicyServiceSettingsBuilder =
  *     AutoscalingPolicyServiceSettings.newBuilder();
  * autoscalingPolicyServiceSettingsBuilder
  *     .createAutoscalingPolicySettings()
  *     .setRetrySettings(
- *         autoscalingPolicyServiceSettingsBuilder.createAutoscalingPolicySettings().getRetrySettings().toBuilder()
+ *         autoscalingPolicyServiceSettingsBuilder
+ *             .createAutoscalingPolicySettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * AutoscalingPolicyServiceSettings autoscalingPolicyServiceSettings = autoscalingPolicyServiceSettingsBuilder.build();
- * </code>
- * </pre>
+ * AutoscalingPolicyServiceSettings autoscalingPolicyServiceSettings =
+ *     autoscalingPolicyServiceSettingsBuilder.build();
+ * }</pre>
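The same builder pattern should apply to the other per-method settings on this class, e.g. retryable codes rather than the total timeout; in this sketch the deleteAutoscalingPolicySettings() accessor and the chosen codes are assumptions for illustration:

    autoscalingPolicyServiceSettingsBuilder
        .deleteAutoscalingPolicySettings()
        .setRetryableCodes(StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED);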
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class AutoscalingPolicyServiceSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createAutoscalingPolicy. */ public UnaryCallSettings createAutoscalingPolicySettings() { @@ -168,18 +171,15 @@ protected AutoscalingPolicyServiceSettings(Builder settingsBuilder) throws IOExc /** Builder for AutoscalingPolicyServiceSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(AutoscalingPolicyServiceStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(AutoscalingPolicyServiceStubSettings.newBuilder()); - } - protected Builder(AutoscalingPolicyServiceSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -188,11 +188,15 @@ protected Builder(AutoscalingPolicyServiceStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(AutoscalingPolicyServiceStubSettings.newBuilder()); + } + public AutoscalingPolicyServiceStubSettings.Builder getStubSettingsBuilder() { return ((AutoscalingPolicyServiceStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClient.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClient.java index ef8757f8..079cb1fd 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClient.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.ApiFunction; @@ -39,7 +40,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: The ClusterControllerService provides methods to manage clusters of Compute * Engine instances. @@ -47,18 +48,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * <pre>
- * <code>
- * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
- *   String projectId = "";
- *   String region = "";
- *   String clusterName = "";
- *   Cluster response = clusterControllerClient.getCluster(projectId, region, clusterName);
- * }
- * </code>
- * </pre>
- *
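As with the autoscaling client, the old inline sample is dropped in favor of the GitHub samples. A minimal sketch of the same getCluster call (placeholder IDs):

    try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
      Cluster response = clusterControllerClient.getCluster("my-project", "my-region", "my-cluster");
    }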

Note: close() needs to be called on the clusterControllerClient object to clean up resources + *

Note: close() needs to be called on the ClusterControllerClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -87,30 +77,28 @@ * *

To customize credentials: * - *

- * <pre>
- * <code>
+ * <pre>{@code
  * ClusterControllerSettings clusterControllerSettings =
  *     ClusterControllerSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * ClusterControllerClient clusterControllerClient =
  *     ClusterControllerClient.create(clusterControllerSettings);
- * </code>
- * </pre>
+ * }</pre>
 *
- * To customize the endpoint:
+ * <p>To customize the endpoint:
 *
- * <pre>
- * <code>
+ * <pre>{@code
  * ClusterControllerSettings clusterControllerSettings =
  *     ClusterControllerSettings.newBuilder().setEndpoint(myEndpoint).build();
  * ClusterControllerClient clusterControllerClient =
  *     ClusterControllerClient.create(clusterControllerSettings);
- * </code>
- * </pre>
+ * }</pre>
+ *
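The long-running methods below return an OperationFuture whose metadata type is ClusterOperationMetadata; a hedged sketch of a blocking create (placeholder IDs, exception handling elided):

    // createClusterAsync(...).get() blocks until the cluster is running or the operation fails.
    Cluster cluster = Cluster.newBuilder().build();
    Cluster created =
        clusterControllerClient.createClusterAsync("my-project", "my-region", cluster).get();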
Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class ClusterControllerClient implements BackgroundResource { private final ClusterControllerSettings settings; private final ClusterControllerStub stub; @@ -132,7 +120,7 @@ public static final ClusterControllerClient create(ClusterControllerSettings set /** * Constructs an instance of ClusterControllerClient, using the given stub for making calls. This - * is for advanced usage - prefer to use ClusterControllerSettings}. + * is for advanced usage - prefer using create(ClusterControllerSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final ClusterControllerClient create(ClusterControllerStub stub) { @@ -170,37 +158,22 @@ public ClusterControllerStub getStub() { * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationsClient getOperationsClient() { return operationsClient; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   Cluster response = clusterControllerClient.createClusterAsync(projectId, region, cluster).get();
-   * }
-   * 
- * * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs * to. * @param region Required. The Dataproc region in which to handle the request. * @param cluster Required. The cluster to create. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture createClusterAsync( String projectId, String region, Cluster cluster) { CreateClusterRequest request = @@ -212,115 +185,51 @@ public final OperationFuture createClusterAsy return createClusterAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   CreateClusterRequest request = CreateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setCluster(cluster)
-   *     .build();
-   *   Cluster response = clusterControllerClient.createClusterAsync(request).get();
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture createClusterAsync( CreateClusterRequest request) { return createClusterOperationCallable().futureCall(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   CreateClusterRequest request = CreateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setCluster(cluster)
-   *     .build();
-   *   OperationFuture<Cluster, ClusterOperationMetadata> future = clusterControllerClient.createClusterOperationCallable().futureCall(request);
-   *   // Do something
-   *   Cluster response = future.get();
-   * }
-   * 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable createClusterOperationCallable() { return stub.createClusterOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   CreateClusterRequest request = CreateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setCluster(cluster)
-   *     .build();
-   *   ApiFuture<Operation> future = clusterControllerClient.createClusterCallable().futureCall(request);
-   *   // Do something
-   *   Operation response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createClusterCallable() { return stub.createClusterCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   Cluster response = clusterControllerClient.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get();
-   * }
-   * </code></pre>
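The updateMask parameter described below is a google.protobuf.FieldMask of paths relative to Cluster; per the table in these docs, changing the number of workers would look roughly like this (path string taken from that table):

    FieldMask updateMask =
        FieldMask.newBuilder().addPaths("config.worker_config.num_instances").build();
    Cluster updated =
        clusterControllerClient
            .updateClusterAsync(projectId, region, clusterName, cluster, updateMask)
            .get();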
- * * @param projectId Required. The ID of the Google Cloud Platform project the cluster belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param clusterName Required. The cluster name. @@ -353,8 +262,6 @@ public final UnaryCallable createClusterCallabl * change autoscaling policies</td> </tr> </table> * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture updateClusterAsync( String projectId, String region, String clusterName, Cluster cluster, FieldMask updateMask) { UpdateClusterRequest request = @@ -368,133 +275,57 @@ public final OperationFuture updateClusterAsy return updateClusterAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateClusterRequest request = UpdateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .setCluster(cluster)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   Cluster response = clusterControllerClient.updateClusterAsync(request).get();
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture updateClusterAsync( UpdateClusterRequest request) { return updateClusterOperationCallable().futureCall(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateClusterRequest request = UpdateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .setCluster(cluster)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   OperationFuture<Cluster, ClusterOperationMetadata> future = clusterControllerClient.updateClusterOperationCallable().futureCall(request);
-   *   // Do something
-   *   Cluster response = future.get();
-   * }
-   * 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable updateClusterOperationCallable() { return stub.updateClusterOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster cluster = Cluster.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateClusterRequest request = UpdateClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .setCluster(cluster)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   ApiFuture<Operation> future = clusterControllerClient.updateClusterCallable().futureCall(request);
-   *   // Do something
-   *   Operation response = future.get();
-   * }
-   * 
*/ public final UnaryCallable updateClusterCallable() { return stub.updateClusterCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   clusterControllerClient.deleteClusterAsync(projectId, region, clusterName).get();
-   * }
-   * 
- * * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs * to. * @param region Required. The Dataproc region in which to handle the request. * @param clusterName Required. The cluster name. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture deleteClusterAsync( String projectId, String region, String clusterName) { DeleteClusterRequest request = @@ -506,245 +337,49 @@ public final OperationFuture deleteClusterAsync return deleteClusterAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * - *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DeleteClusterRequest request = DeleteClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   clusterControllerClient.deleteClusterAsync(request).get();
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture deleteClusterAsync( DeleteClusterRequest request) { return deleteClusterOperationCallable().futureCall(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * *

Sample code: - * - *


-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DeleteClusterRequest request = DeleteClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   OperationFuture<Empty, ClusterOperationMetadata> future = clusterControllerClient.deleteClusterOperationCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable deleteClusterOperationCallable() { return stub.deleteClusterOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a cluster in a project. The returned * [Operation.metadata][google.longrunning.Operation.metadata] will be * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DeleteClusterRequest request = DeleteClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   ApiFuture<Operation> future = clusterControllerClient.deleteClusterCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable deleteClusterCallable() { return stub.deleteClusterCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Gets cluster diagnostic information. The returned - * [Operation.metadata][google.longrunning.Operation.metadata] will be - * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - * After the operation completes, [Operation.response][google.longrunning.Operation.response] - * contains [Empty][google.protobuf.Empty]. - * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   clusterControllerClient.diagnoseClusterAsync(projectId, region, clusterName).get();
-   * }
-   * </code></pre>
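The diagnose methods are relocated in this hunk rather than removed (they reappear below without samples); as a hedged sketch with placeholder identifiers:

import com.google.cloud.dataproc.v1beta2.ClusterControllerClient;

public class DiagnoseClusterSketch {
  public static void main(String[] args) throws Exception {
    try (ClusterControllerClient client = ClusterControllerClient.create()) {
      // The operation's response is Empty, so get() only blocks until diagnostics finish.
      client.diagnoseClusterAsync("my-project", "us-central1", "my-cluster").get();
    }
  }
}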
- * - * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs - * to. - * @param region Required. The Dataproc region in which to handle the request. - * @param clusterName Required. The cluster name. - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") - public final OperationFuture diagnoseClusterAsync( - String projectId, String region, String clusterName) { - DiagnoseClusterRequest request = - DiagnoseClusterRequest.newBuilder() - .setProjectId(projectId) - .setRegion(region) - .setClusterName(clusterName) - .build(); - return diagnoseClusterAsync(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Gets cluster diagnostic information. The returned - * [Operation.metadata][google.longrunning.Operation.metadata] will be - * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - * After the operation completes, [Operation.response][google.longrunning.Operation.response] - * contains [Empty][google.protobuf.Empty]. - * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DiagnoseClusterRequest request = DiagnoseClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   clusterControllerClient.diagnoseClusterAsync(request).get();
-   * }
-   * </code></pre>
- * - * @param request The request object containing all of the parameters for the API call. - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") - public final OperationFuture diagnoseClusterAsync( - DiagnoseClusterRequest request) { - return diagnoseClusterOperationCallable().futureCall(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Gets cluster diagnostic information. The returned - * [Operation.metadata][google.longrunning.Operation.metadata] will be - * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - * After the operation completes, [Operation.response][google.longrunning.Operation.response] - * contains [Empty][google.protobuf.Empty]. - * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DiagnoseClusterRequest request = DiagnoseClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   OperationFuture<Empty, ClusterOperationMetadata> future = clusterControllerClient.diagnoseClusterOperationCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
- */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public final OperationCallable - diagnoseClusterOperationCallable() { - return stub.diagnoseClusterOperationCallable(); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Gets cluster diagnostic information. The returned - * [Operation.metadata][google.longrunning.Operation.metadata] will be - * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - * After the operation completes, [Operation.response][google.longrunning.Operation.response] - * contains [Empty][google.protobuf.Empty]. - * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   DiagnoseClusterRequest request = DiagnoseClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   ApiFuture<Operation> future = clusterControllerClient.diagnoseClusterCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
- */ - public final UnaryCallable diagnoseClusterCallable() { - return stub.diagnoseClusterCallable(); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a cluster in a project. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   Cluster response = clusterControllerClient.getCluster(projectId, region, clusterName);
-   * }
-   * </code></pre>
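A minimal sketch of the same lookup, assuming placeholder names:

import com.google.cloud.dataproc.v1beta2.Cluster;
import com.google.cloud.dataproc.v1beta2.ClusterControllerClient;

public class GetClusterSketch {
  public static void main(String[] args) throws Exception {
    try (ClusterControllerClient client = ClusterControllerClient.create()) {
      Cluster cluster = client.getCluster("my-project", "us-central1", "my-cluster");
      // Print the lifecycle state of the returned cluster resource.
      System.out.println(cluster.getStatus().getState());
    }
  }
}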
- * * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs * to. * @param region Required. The Dataproc region in which to handle the request. @@ -761,26 +396,10 @@ public final Cluster getCluster(String projectId, String region, String clusterN return getCluster(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a cluster in a project. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   GetClusterRequest request = GetClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   Cluster response = clusterControllerClient.getCluster(request);
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -788,48 +407,20 @@ public final Cluster getCluster(GetClusterRequest request) { return getClusterCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a cluster in a project. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String clusterName = "";
-   *   GetClusterRequest request = GetClusterRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setClusterName(clusterName)
-   *     .build();
-   *   ApiFuture<Cluster> future = clusterControllerClient.getClusterCallable().futureCall(request);
-   *   // Do something
-   *   Cluster response = future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable getClusterCallable() { return stub.getClusterCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists all regions/{region}/clusters in a project alphabetically. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   for (Cluster element : clusterControllerClient.listClusters(projectId, region).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * </code></pre>
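The paged variant hides page tokens behind iterateAll(); a minimal sketch with placeholder names:

import com.google.cloud.dataproc.v1beta2.Cluster;
import com.google.cloud.dataproc.v1beta2.ClusterControllerClient;

public class ListClustersSketch {
  public static void main(String[] args) throws Exception {
    try (ClusterControllerClient client = ClusterControllerClient.create()) {
      // iterateAll() transparently fetches further pages as the loop advances.
      for (Cluster cluster : client.listClusters("my-project", "us-central1").iterateAll()) {
        System.out.println(cluster.getClusterName());
      }
    }
  }
}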
- * * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs * to. * @param region Required. The Dataproc region in which to handle the request. @@ -841,23 +432,10 @@ public final ListClustersPagedResponse listClusters(String projectId, String reg return listClusters(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists all regions/{region}/clusters in a project alphabetically. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String filter = "";
-   *   for (Cluster element : clusterControllerClient.listClusters(projectId, region, filter).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * </code></pre>
- * * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs * to. * @param region Required. The Dataproc region in which to handle the request. @@ -888,26 +466,10 @@ public final ListClustersPagedResponse listClusters( return listClusters(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists all regions/{region}/clusters in a project alphabetically. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListClustersRequest request = ListClustersRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   for (Cluster element : clusterControllerClient.listClusters(request).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -915,66 +477,97 @@ public final ListClustersPagedResponse listClusters(ListClustersRequest request) return listClustersPagedCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists all regions/{region}/clusters in a project alphabetically. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListClustersRequest request = ListClustersRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   ApiFuture<ListClustersPagedResponse> future = clusterControllerClient.listClustersPagedCallable().futureCall(request);
-   *   // Do something
-   *   for (Cluster element : future.get().iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * </code></pre>
*/ public final UnaryCallable listClustersPagedCallable() { return stub.listClustersPagedCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists all regions/{region}/clusters in a project alphabetically. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListClustersRequest request = ListClustersRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   while (true) {
-   *     ListClustersResponse response = clusterControllerClient.listClustersCallable().call(request);
-   *     for (Cluster element : response.getClustersList()) {
-   *       // doThingsWith(element);
-   *     }
-   *     String nextPageToken = response.getNextPageToken();
-   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
-   *       request = request.toBuilder().setPageToken(nextPageToken).build();
-   *     } else {
-   *       break;
-   *     }
-   *   }
-   * }
-   * </code></pre>
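A standalone sketch of the same manual-pagination loop over the raw callable (placeholder project and region; plain string checks replace the Guava helper used in the removed sample):

import com.google.cloud.dataproc.v1beta2.Cluster;
import com.google.cloud.dataproc.v1beta2.ClusterControllerClient;
import com.google.cloud.dataproc.v1beta2.ListClustersRequest;
import com.google.cloud.dataproc.v1beta2.ListClustersResponse;

public class ListClustersPagingSketch {
  public static void main(String[] args) throws Exception {
    try (ClusterControllerClient client = ClusterControllerClient.create()) {
      ListClustersRequest request =
          ListClustersRequest.newBuilder()
              .setProjectId("my-project")
              .setRegion("us-central1")
              .build();
      while (true) {
        // One RPC per page; the response carries the token for the next page.
        ListClustersResponse response = client.listClustersCallable().call(request);
        for (Cluster cluster : response.getClustersList()) {
          System.out.println(cluster.getClusterName());
        }
        if (response.getNextPageToken().isEmpty()) {
          break;
        }
        request = request.toBuilder().setPageToken(response.getNextPageToken()).build();
      }
    }
  }
}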
*/ public final UnaryCallable listClustersCallable() { return stub.listClustersCallable(); } + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets cluster diagnostic information. The returned + * [Operation.metadata][google.longrunning.Operation.metadata] will be + * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + * After the operation completes, [Operation.response][google.longrunning.Operation.response] + * contains [Empty][google.protobuf.Empty]. + * + * @param projectId Required. The ID of the Google Cloud Platform project that the cluster belongs + * to. + * @param region Required. The Dataproc region in which to handle the request. + * @param clusterName Required. The cluster name. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final OperationFuture diagnoseClusterAsync( + String projectId, String region, String clusterName) { + DiagnoseClusterRequest request = + DiagnoseClusterRequest.newBuilder() + .setProjectId(projectId) + .setRegion(region) + .setClusterName(clusterName) + .build(); + return diagnoseClusterAsync(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets cluster diagnostic information. The returned + * [Operation.metadata][google.longrunning.Operation.metadata] will be + * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + * After the operation completes, [Operation.response][google.longrunning.Operation.response] + * contains [Empty][google.protobuf.Empty]. + * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final OperationFuture diagnoseClusterAsync( + DiagnoseClusterRequest request) { + return diagnoseClusterOperationCallable().futureCall(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Gets cluster diagnostic information. The returned + * [Operation.metadata][google.longrunning.Operation.metadata] will be + * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + * After the operation completes, [Operation.response][google.longrunning.Operation.response] + * contains [Empty][google.protobuf.Empty]. + * + *

+   * <p>Sample code:
+   */
+  public final OperationCallable<DiagnoseClusterRequest, Empty, ClusterOperationMetadata>
+      diagnoseClusterOperationCallable() {
+    return stub.diagnoseClusterOperationCallable();
+  }
+
+  // AUTO-GENERATED DOCUMENTATION AND METHOD.
+  /**
+   * Gets cluster diagnostic information. The returned
+   * [Operation.metadata][google.longrunning.Operation.metadata] will be
+   * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
+   * After the operation completes, [Operation.response][google.longrunning.Operation.response]
+   * contains [Empty][google.protobuf.Empty].
+   *

Sample code: + */ + public final UnaryCallable diagnoseClusterCallable() { + return stub.diagnoseClusterCallable(); + } + @Override public final void close() { stub.close(); diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSettings.java index bc15c2a9..de87b9c1 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import static com.google.cloud.dataproc.v1beta2.ClusterControllerClient.ListClustersPagedResponse; @@ -36,7 +37,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link ClusterControllerClient}. * @@ -53,31 +54,30 @@ * *

 * <p>For example, to set the total timeout of getCluster to 30 seconds:
 *
- * <pre><code>
+ * <pre>{@code
  * ClusterControllerSettings.Builder clusterControllerSettingsBuilder =
  *     ClusterControllerSettings.newBuilder();
  * clusterControllerSettingsBuilder
  *     .getClusterSettings()
  *     .setRetrySettings(
- *         clusterControllerSettingsBuilder.getClusterSettings().getRetrySettings().toBuilder()
+ *         clusterControllerSettingsBuilder
+ *             .getClusterSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * ClusterControllerSettings clusterControllerSettings = clusterControllerSettingsBuilder.build();
- * </code>
- * </pre>
+ * }</pre>
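The {@code} block above is the in-Javadoc version of this override; as a standalone sketch (the class name and the 30-second value are illustrative):

import com.google.cloud.dataproc.v1beta2.ClusterControllerClient;
import com.google.cloud.dataproc.v1beta2.ClusterControllerSettings;
import org.threeten.bp.Duration;

public class GetClusterTimeoutSketch {
  public static void main(String[] args) throws Exception {
    ClusterControllerSettings.Builder builder = ClusterControllerSettings.newBuilder();
    // Rebuild the retry settings for getCluster with a 30-second total timeout.
    builder
        .getClusterSettings()
        .setRetrySettings(
            builder.getClusterSettings().getRetrySettings().toBuilder()
                .setTotalTimeout(Duration.ofSeconds(30))
                .build());
    try (ClusterControllerClient client = ClusterControllerClient.create(builder.build())) {
      // getCluster calls made through this client now observe the tighter timeout.
    }
  }
}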
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class ClusterControllerSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createCluster. */ public UnaryCallSettings createClusterSettings() { return ((ClusterControllerStubSettings) getStubSettings()).createClusterSettings(); } /** Returns the object with the settings used for calls to createCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings createClusterOperationSettings() { return ((ClusterControllerStubSettings) getStubSettings()).createClusterOperationSettings(); @@ -89,8 +89,6 @@ public UnaryCallSettings updateClusterSettings( } /** Returns the object with the settings used for calls to updateCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings updateClusterOperationSettings() { return ((ClusterControllerStubSettings) getStubSettings()).updateClusterOperationSettings(); @@ -102,26 +100,11 @@ public UnaryCallSettings deleteClusterSettings( } /** Returns the object with the settings used for calls to deleteCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings deleteClusterOperationSettings() { return ((ClusterControllerStubSettings) getStubSettings()).deleteClusterOperationSettings(); } - /** Returns the object with the settings used for calls to diagnoseCluster. */ - public UnaryCallSettings diagnoseClusterSettings() { - return ((ClusterControllerStubSettings) getStubSettings()).diagnoseClusterSettings(); - } - - /** Returns the object with the settings used for calls to diagnoseCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") - public OperationCallSettings - diagnoseClusterOperationSettings() { - return ((ClusterControllerStubSettings) getStubSettings()).diagnoseClusterOperationSettings(); - } - /** Returns the object with the settings used for calls to getCluster. */ public UnaryCallSettings getClusterSettings() { return ((ClusterControllerStubSettings) getStubSettings()).getClusterSettings(); @@ -133,6 +116,17 @@ public UnaryCallSettings getClusterSettings() { return ((ClusterControllerStubSettings) getStubSettings()).listClustersSettings(); } + /** Returns the object with the settings used for calls to diagnoseCluster. */ + public UnaryCallSettings diagnoseClusterSettings() { + return ((ClusterControllerStubSettings) getStubSettings()).diagnoseClusterSettings(); + } + + /** Returns the object with the settings used for calls to diagnoseCluster. */ + public OperationCallSettings + diagnoseClusterOperationSettings() { + return ((ClusterControllerStubSettings) getStubSettings()).diagnoseClusterOperationSettings(); + } + public static final ClusterControllerSettings create(ClusterControllerStubSettings stub) throws IOException { return new ClusterControllerSettings.Builder(stub.toBuilder()).build(); @@ -193,18 +187,15 @@ protected ClusterControllerSettings(Builder settingsBuilder) throws IOException /** Builder for ClusterControllerSettings. 
*/ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(ClusterControllerStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(ClusterControllerStubSettings.newBuilder()); - } - protected Builder(ClusterControllerSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -213,11 +204,15 @@ protected Builder(ClusterControllerStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(ClusterControllerStubSettings.newBuilder()); + } + public ClusterControllerStubSettings.Builder getStubSettingsBuilder() { return ((ClusterControllerStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -236,8 +231,6 @@ public UnaryCallSettings.Builder createClusterS } /** Returns the builder for the settings used for calls to createCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings.Builder createClusterOperationSettings() { return getStubSettingsBuilder().createClusterOperationSettings(); @@ -249,8 +242,6 @@ public UnaryCallSettings.Builder updateClusterS } /** Returns the builder for the settings used for calls to updateCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings.Builder updateClusterOperationSettings() { return getStubSettingsBuilder().updateClusterOperationSettings(); @@ -262,26 +253,11 @@ public UnaryCallSettings.Builder deleteClusterS } /** Returns the builder for the settings used for calls to deleteCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings.Builder deleteClusterOperationSettings() { return getStubSettingsBuilder().deleteClusterOperationSettings(); } - /** Returns the builder for the settings used for calls to diagnoseCluster. */ - public UnaryCallSettings.Builder diagnoseClusterSettings() { - return getStubSettingsBuilder().diagnoseClusterSettings(); - } - - /** Returns the builder for the settings used for calls to diagnoseCluster. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") - public OperationCallSettings.Builder - diagnoseClusterOperationSettings() { - return getStubSettingsBuilder().diagnoseClusterOperationSettings(); - } - /** Returns the builder for the settings used for calls to getCluster. */ public UnaryCallSettings.Builder getClusterSettings() { return getStubSettingsBuilder().getClusterSettings(); @@ -294,6 +270,17 @@ public UnaryCallSettings.Builder getClusterSettings( return getStubSettingsBuilder().listClustersSettings(); } + /** Returns the builder for the settings used for calls to diagnoseCluster. */ + public UnaryCallSettings.Builder diagnoseClusterSettings() { + return getStubSettingsBuilder().diagnoseClusterSettings(); + } + + /** Returns the builder for the settings used for calls to diagnoseCluster. 
*/ + public OperationCallSettings.Builder + diagnoseClusterOperationSettings() { + return getStubSettingsBuilder().diagnoseClusterOperationSettings(); + } + @Override public ClusterControllerSettings build() throws IOException { return new ClusterControllerSettings(this); diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerClient.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerClient.java index 317f91de..2dcf938f 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerClient.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.ApiFunction; @@ -38,25 +39,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: The JobController provides methods to manage jobs. * *

 * <p>This class provides the ability to make remote calls to the backing service through method
 * calls that map to API methods. Sample code to get started:
 *
- * <pre><code>
- * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
- *   String projectId = "";
- *   String region = "";
- *   Job job = Job.newBuilder().build();
- *   Job response = jobControllerClient.submitJob(projectId, region, job);
- * }
- * </code>
- * </pre>
- *

- * <p>Note: close() needs to be called on the jobControllerClient object to clean up resources such

+ * <p>Note: close() needs to be called on the JobControllerClient object to clean up resources such
 * as threads. In the example above, try-with-resources is used, which automatically calls close().
 *

The surface of this class includes several types of Java methods for each of the API's @@ -84,30 +74,26 @@ * *

 * <p>To customize credentials:

- * <pre><code>
+ * <pre>{@code
  * JobControllerSettings jobControllerSettings =
  *     JobControllerSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
- * JobControllerClient jobControllerClient =
- *     JobControllerClient.create(jobControllerSettings);
- * </code>
- * </pre>
+ * JobControllerClient jobControllerClient = JobControllerClient.create(jobControllerSettings);
+ * }</pre>
 *
- * To customize the endpoint:
+ * <p>To customize the endpoint:
 *
- * <pre><code>
+ * <pre>{@code
  * JobControllerSettings jobControllerSettings =
  *     JobControllerSettings.newBuilder().setEndpoint(myEndpoint).build();
- * JobControllerClient jobControllerClient =
- *     JobControllerClient.create(jobControllerSettings);
- * </code>
- * </pre>
+ * JobControllerClient jobControllerClient = JobControllerClient.create(jobControllerSettings);
+ * }</pre>
+ *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class JobControllerClient implements BackgroundResource { private final JobControllerSettings settings; private final JobControllerStub stub; @@ -129,7 +115,7 @@ public static final JobControllerClient create(JobControllerSettings settings) /** * Constructs an instance of JobControllerClient, using the given stub for making calls. This is - * for advanced usage - prefer to use JobControllerSettings}. + * for advanced usage - prefer using create(JobControllerSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final JobControllerClient create(JobControllerStub stub) { @@ -167,27 +153,14 @@ public JobControllerStub getStub() { * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationsClient getOperationsClient() { return operationsClient; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits a job to a cluster. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   Job response = jobControllerClient.submitJob(projectId, region, job);
-   * }
-   * </code></pre>
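A hedged sketch of the same call with a concrete job definition; it assumes an existing cluster and the Hadoop MapReduce examples jar that ships on Dataproc images, and all names are placeholders:

import com.google.cloud.dataproc.v1beta2.HadoopJob;
import com.google.cloud.dataproc.v1beta2.Job;
import com.google.cloud.dataproc.v1beta2.JobControllerClient;
import com.google.cloud.dataproc.v1beta2.JobPlacement;

public class SubmitJobSketch {
  public static void main(String[] args) throws Exception {
    try (JobControllerClient client = JobControllerClient.create()) {
      Job job =
          Job.newBuilder()
              .setPlacement(JobPlacement.newBuilder().setClusterName("my-cluster"))
              .setHadoopJob(
                  HadoopJob.newBuilder()
                      .setMainJarFileUri(
                          "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar")
                      .addArgs("wordcount")
                      .addArgs("gs://my-bucket/input")
                      .addArgs("gs://my-bucket/output"))
              .build();
      // submitJob returns immediately with the queued job resource.
      Job submitted = client.submitJob("my-project", "us-central1", job);
      System.out.println(submitted.getReference().getJobId());
    }
  }
}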
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param job Required. The job resource. @@ -199,26 +172,10 @@ public final Job submitJob(String projectId, String region, Job job) { return submitJob(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits a job to a cluster. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   Job response = jobControllerClient.submitJob(request);
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -226,54 +183,25 @@ public final Job submitJob(SubmitJobRequest request) { return submitJobCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits a job to a cluster. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   ApiFuture<Job> future = jobControllerClient.submitJobCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable submitJobCallable() { return stub.submitJobCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits job to a cluster. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   Job response = jobControllerClient.submitJobAsOperationAsync(projectId, region, job).get();
-   * }
-   * </code></pre>
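Unlike submitJob, the AsOperation variant wraps the job in a long-running operation whose future resolves with the terminal Job. A minimal sketch, assuming an existing cluster and the Spark examples jar present on Dataproc images:

import com.google.cloud.dataproc.v1beta2.Job;
import com.google.cloud.dataproc.v1beta2.JobControllerClient;
import com.google.cloud.dataproc.v1beta2.JobPlacement;
import com.google.cloud.dataproc.v1beta2.SparkJob;

public class SubmitJobAsOperationSketch {
  public static void main(String[] args) throws Exception {
    try (JobControllerClient client = JobControllerClient.create()) {
      Job job =
          Job.newBuilder()
              .setPlacement(JobPlacement.newBuilder().setClusterName("my-cluster"))
              .setSparkJob(
                  SparkJob.newBuilder()
                      .setMainClass("org.apache.spark.examples.SparkPi")
                      .addJarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                      .addArgs("1000"))
              .build();
      // The future completes only when the job reaches a terminal state.
      Job finished = client.submitJobAsOperationAsync("my-project", "us-central1", job).get();
      System.out.println(finished.getStatus().getState());
    }
  }
}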
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param job Required. The job resource. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture submitJobAsOperationAsync( String projectId, String region, Job job) { SubmitJobRequest request = @@ -281,105 +209,43 @@ public final OperationFuture submitJobAsOperationAsync( return submitJobAsOperationAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits job to a cluster. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   Job response = jobControllerClient.submitJobAsOperationAsync(request).get();
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture submitJobAsOperationAsync( SubmitJobRequest request) { return submitJobAsOperationOperationCallable().futureCall(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits job to a cluster. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   OperationFuture<Job, JobMetadata> future = jobControllerClient.submitJobAsOperationOperationCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * </code></pre>
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable submitJobAsOperationOperationCallable() { return stub.submitJobAsOperationOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Submits job to a cluster. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   Job job = Job.newBuilder().build();
-   *   SubmitJobRequest request = SubmitJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJob(job)
-   *     .build();
-   *   ApiFuture<Operation> future = jobControllerClient.submitJobAsOperationCallable().futureCall(request);
-   *   // Do something
-   *   Operation response = future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable submitJobAsOperationCallable() { return stub.submitJobAsOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a job in a project. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   Job response = jobControllerClient.getJob(projectId, region, jobId);
-   * }
-   * </code></pre>
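A minimal sketch of the same lookup, with placeholder identifiers:

import com.google.cloud.dataproc.v1beta2.Job;
import com.google.cloud.dataproc.v1beta2.JobControllerClient;

public class GetJobSketch {
  public static void main(String[] args) throws Exception {
    try (JobControllerClient client = JobControllerClient.create()) {
      Job job = client.getJob("my-project", "us-central1", "my-job-id");
      // Inspect the current lifecycle state of the job.
      System.out.println(job.getStatus().getState());
    }
  }
}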
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param jobId Required. The job ID. @@ -395,26 +261,10 @@ public final Job getJob(String projectId, String region, String jobId) { return getJob(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a job in a project. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   GetJobRequest request = GetJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   Job response = jobControllerClient.getJob(request);
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -422,48 +272,20 @@ public final Job getJob(GetJobRequest request) { return getJobCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the resource representation for a job in a project. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   GetJobRequest request = GetJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   ApiFuture<Job> future = jobControllerClient.getJobCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable getJobCallable() { return stub.getJobCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists regions/{region}/jobs in a project. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   for (Job element : jobControllerClient.listJobs(projectId, region).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * </code></pre>
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -474,23 +296,10 @@ public final ListJobsPagedResponse listJobs(String projectId, String region) { return listJobs(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists regions/{region}/jobs in a project. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String filter = "";
-   *   for (Job element : jobControllerClient.listJobs(projectId, region, filter).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * </code></pre>
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param filter Optional. A filter constraining the jobs to list. Filters are case-sensitive and @@ -514,26 +323,10 @@ public final ListJobsPagedResponse listJobs(String projectId, String region, Str return listJobs(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists regions/{region}/jobs in a project. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListJobsRequest request = ListJobsRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   for (Job element : jobControllerClient.listJobs(request).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -541,89 +334,30 @@ public final ListJobsPagedResponse listJobs(ListJobsRequest request) { return listJobsPagedCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists regions/{region}/jobs in a project. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListJobsRequest request = ListJobsRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   ApiFuture<ListJobsPagedResponse> future = jobControllerClient.listJobsPagedCallable().futureCall(request);
-   *   // Do something
-   *   for (Job element : future.get().iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * </code></pre>
*/ public final UnaryCallable listJobsPagedCallable() { return stub.listJobsPagedCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists regions/{region}/jobs in a project. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   ListJobsRequest request = ListJobsRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .build();
-   *   while (true) {
-   *     ListJobsResponse response = jobControllerClient.listJobsCallable().call(request);
-   *     for (Job element : response.getJobsList()) {
-   *       // doThingsWith(element);
-   *     }
-   *     String nextPageToken = response.getNextPageToken();
-   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
-   *       request = request.toBuilder().setPageToken(nextPageToken).build();
-   *     } else {
-   *       break;
-   *     }
-   *   }
-   * }
-   * </code></pre>
*/ public final UnaryCallable listJobsCallable() { return stub.listJobsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a job in a project. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   Job job = Job.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateJobRequest request = UpdateJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .setJob(job)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   Job response = jobControllerClient.updateJob(request);
-   * }
-   * </code></pre>
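A hedged sketch of the same update; per the API only the job labels are mutable, so the FieldMask below names just that path, and all identifiers are placeholders:

import com.google.cloud.dataproc.v1beta2.Job;
import com.google.cloud.dataproc.v1beta2.JobControllerClient;
import com.google.cloud.dataproc.v1beta2.UpdateJobRequest;
import com.google.protobuf.FieldMask;

public class UpdateJobSketch {
  public static void main(String[] args) throws Exception {
    try (JobControllerClient client = JobControllerClient.create()) {
      UpdateJobRequest request =
          UpdateJobRequest.newBuilder()
              .setProjectId("my-project")
              .setRegion("us-central1")
              .setJobId("my-job-id")
              .setJob(Job.newBuilder().putLabels("team", "data-eng").build())
              // Only fields named in the mask are applied to the stored job.
              .setUpdateMask(FieldMask.newBuilder().addPaths("labels").build())
              .build();
      Job updated = client.updateJob(request);
      System.out.println(updated.getLabelsMap());
    }
  }
}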
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -631,54 +365,23 @@ public final Job updateJob(UpdateJobRequest request) { return updateJobCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates a job in a project. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   Job job = Job.newBuilder().build();
-   *   FieldMask updateMask = FieldMask.newBuilder().build();
-   *   UpdateJobRequest request = UpdateJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .setJob(job)
-   *     .setUpdateMask(updateMask)
-   *     .build();
-   *   ApiFuture<Job> future = jobControllerClient.updateJobCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable updateJobCallable() { return stub.updateJobCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Starts a job cancellation request. To access the job resource after cancellation, call * [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) * or * [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   Job response = jobControllerClient.cancelJob(projectId, region, jobId);
-   * }
-   * </code></pre>
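A minimal sketch of the cancellation call; cancellation is asynchronous on the server side, so the returned Job reflects the state at request time (placeholder identifiers):

import com.google.cloud.dataproc.v1beta2.Job;
import com.google.cloud.dataproc.v1beta2.JobControllerClient;

public class CancelJobSketch {
  public static void main(String[] args) throws Exception {
    try (JobControllerClient client = JobControllerClient.create()) {
      Job job = client.cancelJob("my-project", "us-central1", "my-job-id");
      // Poll getJob afterwards to observe the terminal CANCELLED state.
      System.out.println(job.getStatus().getState());
    }
  }
}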
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param jobId Required. The job ID. @@ -694,29 +397,13 @@ public final Job cancelJob(String projectId, String region, String jobId) { return cancelJob(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Starts a job cancellation request. To access the job resource after cancellation, call * [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) * or * [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   CancelJobRequest request = CancelJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   Job response = jobControllerClient.cancelJob(request);
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -724,7 +411,7 @@ public final Job cancelJob(CancelJobRequest request) { return cancelJobCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Starts a job cancellation request. To access the job resource after cancellation, call * [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) @@ -732,43 +419,16 @@ public final Job cancelJob(CancelJobRequest request) { * [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   CancelJobRequest request = CancelJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   ApiFuture<Job> future = jobControllerClient.cancelJobCallable().futureCall(request);
-   *   // Do something
-   *   Job response = future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable cancelJobCallable() { return stub.cancelJobCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes the job from the project. If the job is active, the delete fails, and the response * returns `FAILED_PRECONDITION`. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   jobControllerClient.deleteJob(projectId, region, jobId);
-   * }
-   * </code></pre>
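A minimal sketch of the delete call; as the doc comment above notes, this fails with FAILED_PRECONDITION while the job is still active (placeholder identifiers):

import com.google.cloud.dataproc.v1beta2.JobControllerClient;

public class DeleteJobSketch {
  public static void main(String[] args) throws Exception {
    try (JobControllerClient client = JobControllerClient.create()) {
      // Removes the job resource; cancel or wait for completion first if it is active.
      client.deleteJob("my-project", "us-central1", "my-job-id");
    }
  }
}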
- * * @param projectId Required. The ID of the Google Cloud Platform project that the job belongs to. * @param region Required. The Dataproc region in which to handle the request. * @param jobId Required. The job ID. @@ -784,27 +444,11 @@ public final void deleteJob(String projectId, String region, String jobId) { deleteJob(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes the job from the project. If the job is active, the delete fails, and the response * returns `FAILED_PRECONDITION`. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   DeleteJobRequest request = DeleteJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   jobControllerClient.deleteJob(request);
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -812,28 +456,12 @@ public final void deleteJob(DeleteJobRequest request) { deleteJobCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes the job from the project. If the job is active, the delete fails, and the response * returns `FAILED_PRECONDITION`. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
-   *   String projectId = "";
-   *   String region = "";
-   *   String jobId = "";
-   *   DeleteJobRequest request = DeleteJobRequest.newBuilder()
-   *     .setProjectId(projectId)
-   *     .setRegion(region)
-   *     .setJobId(jobId)
-   *     .build();
-   *   ApiFuture<Void> future = jobControllerClient.deleteJobCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable deleteJobCallable() { return stub.deleteJobCallable(); diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerSettings.java index 64548f63..01c5dea3 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/JobControllerSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import static com.google.cloud.dataproc.v1beta2.JobControllerClient.ListJobsPagedResponse; @@ -36,7 +37,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link JobControllerClient}. * @@ -53,36 +54,34 @@ * *

 * <p>For example, to set the total timeout of submitJob to 30 seconds:
 *
- * <pre><code>
- * JobControllerSettings.Builder jobControllerSettingsBuilder =
- *     JobControllerSettings.newBuilder();
+ * <pre>{@code
+ * JobControllerSettings.Builder jobControllerSettingsBuilder = JobControllerSettings.newBuilder();
  * jobControllerSettingsBuilder
  *     .submitJobSettings()
  *     .setRetrySettings(
- *         jobControllerSettingsBuilder.submitJobSettings().getRetrySettings().toBuilder()
+ *         jobControllerSettingsBuilder
+ *             .submitJobSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * JobControllerSettings jobControllerSettings = jobControllerSettingsBuilder.build();
- * 
- * </code>
- * </pre>
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class JobControllerSettings extends ClientSettings { + /** Returns the object with the settings used for calls to submitJob. */ public UnaryCallSettings submitJobSettings() { return ((JobControllerStubSettings) getStubSettings()).submitJobSettings(); } - /** Returns the object with the settings used for calls to submitJobAsOperation. */ + /** Returns the object with the settings used for calls to submitJobAs. */ public UnaryCallSettings submitJobAsOperationSettings() { return ((JobControllerStubSettings) getStubSettings()).submitJobAsOperationSettings(); } /** Returns the object with the settings used for calls to submitJobAsOperation. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings submitJobAsOperationOperationSettings() { return ((JobControllerStubSettings) getStubSettings()).submitJobAsOperationOperationSettings(); @@ -174,18 +173,15 @@ protected JobControllerSettings(Builder settingsBuilder) throws IOException { /** Builder for JobControllerSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(JobControllerStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(JobControllerStubSettings.newBuilder()); - } - protected Builder(JobControllerSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -194,11 +190,15 @@ protected Builder(JobControllerStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(JobControllerStubSettings.newBuilder()); + } + public JobControllerStubSettings.Builder getStubSettingsBuilder() { return ((JobControllerStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -216,14 +216,12 @@ public UnaryCallSettings.Builder submitJobSettings() { return getStubSettingsBuilder().submitJobSettings(); } - /** Returns the builder for the settings used for calls to submitJobAsOperation. */ + /** Returns the builder for the settings used for calls to submitJobAs. */ public UnaryCallSettings.Builder submitJobAsOperationSettings() { return getStubSettingsBuilder().submitJobAsOperationSettings(); } /** Returns the builder for the settings used for calls to submitJobAsOperation. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings.Builder submitJobAsOperationOperationSettings() { return getStubSettingsBuilder().submitJobAsOperationOperationSettings(); diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClient.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClient.java index 62caeb0d..7a14f842 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClient.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.ApiFunction; @@ -39,24 +40,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: The API interface for managing Workflow Templates in the Dataproc API. * *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * <pre>
- * <code>
- * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
- *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
- *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
- * }
- * </code>
- * </pre>
- *
- * <p>Note: close() needs to be called on the workflowTemplateServiceClient object to clean up
+ * <p>Note: close() needs to be called on the WorkflowTemplateServiceClient object to clean up
 * resources such as threads. In the example above, try-with-resources is used, which automatically
 * calls close().
 *
@@ -85,30 +76,28 @@
 *
 * <p>To customize credentials:
 *
- * <pre>
- * <code>
+ * <pre>{@code
  * WorkflowTemplateServiceSettings workflowTemplateServiceSettings =
  *     WorkflowTemplateServiceSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * WorkflowTemplateServiceClient workflowTemplateServiceClient =
  *     WorkflowTemplateServiceClient.create(workflowTemplateServiceSettings);
- * </code>
- * </pre>
+ * }</pre>
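The myCredentials value in the sample above is assumed to already exist. One common way to construct it, sketched here with a hypothetical key-file path, uses GoogleCredentials from the google-auth-library:

    // Requires com.google.auth.oauth2.GoogleCredentials and java.io.FileInputStream.
    // The key-file path is a hypothetical placeholder.
    GoogleCredentials myCredentials =
        GoogleCredentials.fromStream(new FileInputStream("/path/to/service-account-key.json"));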
 *
- * To customize the endpoint:
+ * <p>To customize the endpoint:
 *
- * <pre>
- * <code>
+ * <pre>{@code
  * WorkflowTemplateServiceSettings workflowTemplateServiceSettings =
  *     WorkflowTemplateServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
  * WorkflowTemplateServiceClient workflowTemplateServiceClient =
  *     WorkflowTemplateServiceClient.create(workflowTemplateServiceSettings);
- * </code>
- * </pre>
+ * }</pre>
+ *
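Likewise, myEndpoint in the endpoint sample is assumed to be pre-defined. Dataproc exposes regional endpoints of the form <region>-dataproc.googleapis.com:443, so an illustrative value would be:

    // Illustrative only; choose the region that matches your resources.
    String myEndpoint = "us-central1-dataproc.googleapis.com:443";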
Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class WorkflowTemplateServiceClient implements BackgroundResource { private final WorkflowTemplateServiceSettings settings; private final WorkflowTemplateServiceStub stub; @@ -130,7 +119,7 @@ public static final WorkflowTemplateServiceClient create(WorkflowTemplateService /** * Constructs an instance of WorkflowTemplateServiceClient, using the given stub for making calls. - * This is for advanced usage - prefer to use WorkflowTemplateServiceSettings}. + * This is for advanced usage - prefer using create(WorkflowTemplateServiceSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final WorkflowTemplateServiceClient create(WorkflowTemplateServiceStub stub) { @@ -169,13 +158,195 @@ public WorkflowTemplateServiceStub getStub() { * Returns the OperationsClient that can be used to query the status of a long-running operation * returned by another API method call. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationsClient getOperationsClient() { return operationsClient; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates new workflow template. + * + * @param parent Required. The resource name of the region or location, as described in + * https://cloud.google.com/apis/design/resource_names. + *

+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.create`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.create`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
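A minimal sketch of this call, mirroring the inline sample that this patch removes (bracketed placeholders as in the original samples):

    try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
      WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
    }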
+ * + * @param template Required. The Dataproc workflow template to create. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate createWorkflowTemplate( + LocationName parent, WorkflowTemplate template) { + CreateWorkflowTemplateRequest request = + CreateWorkflowTemplateRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .setTemplate(template) + .build(); + return createWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates new workflow template. + * + * @param parent Required. The resource name of the region or location, as described in + * https://cloud.google.com/apis/design/resource_names. + *
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.create`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.create`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * + * @param template Required. The Dataproc workflow template to create. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate createWorkflowTemplate( + RegionName parent, WorkflowTemplate template) { + CreateWorkflowTemplateRequest request = + CreateWorkflowTemplateRequest.newBuilder() + .setParent(parent == null ? null : parent.toString()) + .setTemplate(template) + .build(); + return createWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates new workflow template. + * + * @param parent Required. The resource name of the region or location, as described in + * https://cloud.google.com/apis/design/resource_names. + *
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.create`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.create`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * + * @param template Required. The Dataproc workflow template to create. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate createWorkflowTemplate(String parent, WorkflowTemplate template) { + CreateWorkflowTemplateRequest request = + CreateWorkflowTemplateRequest.newBuilder().setParent(parent).setTemplate(template).build(); + return createWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates new workflow template. + * + * @param request The request object containing all of the parameters for the API call. + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate createWorkflowTemplate(CreateWorkflowTemplateRequest request) { + return createWorkflowTemplateCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Creates new workflow template. + * + *

Sample code: + */ + public final UnaryCallable + createWorkflowTemplateCallable() { + return stub.createWorkflowTemplateCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Retrieves the latest workflow template. + * + *

Can retrieve a previously instantiated template by specifying an optional version parameter.
+ *
+ * @param name Required. The resource name of the workflow template, as described in
+ *     https://cloud.google.com/apis/design/resource_names.

+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.get`, the resource name of the template has
+ *           the following format:
+ *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+ *       <li>For `projects.locations.workflowTemplates.get`, the resource name of the template has
+ *           the following format:
+ *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     </ul>
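Mirroring the sample removed elsewhere in this patch, a minimal sketch of fetching a template by resource name:

    try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
      WorkflowTemplateName name =
          WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
              "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
      WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(name);
    }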
+ * + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate getWorkflowTemplate(WorkflowTemplateName name) { + GetWorkflowTemplateRequest request = + GetWorkflowTemplateRequest.newBuilder() + .setName(name == null ? null : name.toString()) + .build(); + return getWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Retrieves the latest workflow template. + * + *

Can retrieve a previously instantiated template by specifying an optional version parameter.
+ *
+ * @param name Required. The resource name of the workflow template, as described in
+ *     https://cloud.google.com/apis/design/resource_names.

+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.get`, the resource name of the template has
+ *           the following format:
+ *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+ *       <li>For `projects.locations.workflowTemplates.get`, the resource name of the template has
+ *           the following format:
+ *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     </ul>
+ * + * @throws com.google.api.gax.rpc.ApiException if the remote call fails + */ + public final WorkflowTemplate getWorkflowTemplate(String name) { + GetWorkflowTemplateRequest request = + GetWorkflowTemplateRequest.newBuilder().setName(name).build(); + return getWorkflowTemplate(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. + /** + * Retrieves the latest workflow template. + * + *

Can retrieve a previously instantiated template by specifying an optional version parameter.
+ *
+ * @param request The request object containing all of the parameters for the API call.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final WorkflowTemplate getWorkflowTemplate(GetWorkflowTemplateRequest request) {
+   return getWorkflowTemplateCallable().call(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD.
+ /**
+  * Retrieves the latest workflow template.
+  *

Can retrieve a previously instantiated template by specifying an optional version parameter.
+ *

Sample code: + */ + public final UnaryCallable + getWorkflowTemplateCallable() { + return stub.getWorkflowTemplateCallable(); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -195,27 +366,21 @@ public final OperationsClient getOperationsClient() { *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name).get();
-   * }
-   * 
- * * @param name Required. The resource name of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates.instantiate`, the resource name of the
- *     template has the following format:
- *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
- *     * For `projects.locations.workflowTemplates.instantiate`, the resource name of the
- *     template has the following format:
- *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+ *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     </ul>
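A minimal sketch of the blocking form of this long-running call, as in the removed sample (get() waits for the workflow to complete):

    try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
      WorkflowTemplateName name =
          WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
              "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
      workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name).get();
    }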
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateWorkflowTemplateAsync( WorkflowTemplateName name) { InstantiateWorkflowTemplateRequest request = @@ -225,7 +390,7 @@ public final OperationFuture instantiateWorkflowTemplat return instantiateWorkflowTemplateAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -245,27 +410,21 @@ public final OperationFuture instantiateWorkflowTemplat *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name.toString()).get();
-   * }
-   * 
- * * @param name Required. The resource name of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates.instantiate`, the resource name of the
- *     template has the following format:
- *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
- *     * For `projects.locations.workflowTemplates.instantiate`, the resource name of the
- *     template has the following format:
- *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+ *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateWorkflowTemplateAsync( String name) { InstantiateWorkflowTemplateRequest request = @@ -273,7 +432,7 @@ public final OperationFuture instantiateWorkflowTemplat return instantiateWorkflowTemplateAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -293,30 +452,23 @@ public final OperationFuture instantiateWorkflowTemplat *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   Map<String, String> parameters = new HashMap<>();
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name, parameters).get();
-   * }
-   * 
- * * @param name Required. The resource name of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates.instantiate`, the resource name of the
- *     template has the following format:
- *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
- *     * For `projects.locations.workflowTemplates.instantiate`, the resource name of the
- *     template has the following format:
- *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+ *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     </ul>
+ * * @param parameters Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateWorkflowTemplateAsync( WorkflowTemplateName name, Map parameters) { InstantiateWorkflowTemplateRequest request = @@ -327,7 +479,7 @@ public final OperationFuture instantiateWorkflowTemplat return instantiateWorkflowTemplateAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -347,30 +499,23 @@ public final OperationFuture instantiateWorkflowTemplat *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   Map<String, String> parameters = new HashMap<>();
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(name.toString(), parameters).get();
-   * }
-   * 
- * * @param name Required. The resource name of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates.instantiate`, the resource name of the
- *     template has the following format:
- *     `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
- *     * For `projects.locations.workflowTemplates.instantiate`, the resource name of the
- *     template has the following format:
- *     `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+ *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     </ul>
+ * * @param parameters Optional. Map from parameter names to values that should be used for those * parameters. Values may not exceed 100 characters. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateWorkflowTemplateAsync( String name, Map parameters) { InstantiateWorkflowTemplateRequest request = @@ -381,7 +526,7 @@ public final OperationFuture instantiateWorkflowTemplat return instantiateWorkflowTemplateAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -401,29 +546,15 @@ public final OperationFuture instantiateWorkflowTemplat *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   workflowTemplateServiceClient.instantiateWorkflowTemplateAsync(request).get();
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateWorkflowTemplateAsync( InstantiateWorkflowTemplateRequest request) { return instantiateWorkflowTemplateOperationCallable().futureCall(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -444,26 +575,13 @@ public final OperationFuture instantiateWorkflowTemplat * be [Empty][google.protobuf.Empty]. * *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   OperationFuture<Empty, WorkflowMetadata> future = workflowTemplateServiceClient.instantiateWorkflowTemplateOperationCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable instantiateWorkflowTemplateOperationCallable() { return stub.instantiateWorkflowTemplateOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -484,25 +602,13 @@ public final OperationFuture instantiateWorkflowTemplat * be [Empty][google.protobuf.Empty]. * *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   InstantiateWorkflowTemplateRequest request = InstantiateWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<Operation> future = workflowTemplateServiceClient.instantiateWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
*/ public final UnaryCallable instantiateWorkflowTemplateCallable() { return stub.instantiateWorkflowTemplateCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -527,29 +633,22 @@ public final OperationFuture instantiateWorkflowTemplat *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(parent, template).get();
-   * }
-   * 
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates,instantiateinline`, the resource name of
- *     the region has the following format: `projects/{project_id}/regions/{region}`
- *     * For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
- *     the location has the following format: `projects/{project_id}/locations/{location}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.instantiateinline`, the resource name of the
+ *           region has the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
+ *           the location has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
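As in the removed sample, a minimal sketch of instantiating an inline template and blocking until it completes:

    try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
      workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(parent, template).get();
    }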
+ * * @param template Required. The workflow template to instantiate. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateInlineWorkflowTemplateAsync( - RegionName parent, WorkflowTemplate template) { + LocationName parent, WorkflowTemplate template) { InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -558,7 +657,7 @@ public final OperationFuture instantiateInlineWorkflowT return instantiateInlineWorkflowTemplateAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -583,29 +682,22 @@ public final OperationFuture instantiateInlineWorkflowT *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(parent, template).get();
-   * }
-   * 
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates,instantiateinline`, the resource name of
- *     the region has the following format: `projects/{project_id}/regions/{region}`
- *     * For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
- *     the location has the following format: `projects/{project_id}/locations/{location}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.instantiateinline`, the resource name of the
+ *           region has the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
+ *           the location has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @param template Required. The workflow template to instantiate. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateInlineWorkflowTemplateAsync( - LocationName parent, WorkflowTemplate template) { + RegionName parent, WorkflowTemplate template) { InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -614,7 +706,7 @@ public final OperationFuture instantiateInlineWorkflowT return instantiateInlineWorkflowTemplateAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -639,27 +731,20 @@ public final OperationFuture instantiateInlineWorkflowT *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(parent.toString(), template).get();
-   * }
-   * 
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates,instantiateinline`, the resource name of
- *     the region has the following format: `projects/{project_id}/regions/{region}`
- *     * For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
- *     the location has the following format: `projects/{project_id}/locations/{location}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.instantiateinline`, the resource name of the
+ *           region has the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.instantiateinline`, the resource name of
+ *           the location has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @param template Required. The workflow template to instantiate. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateInlineWorkflowTemplateAsync( String parent, WorkflowTemplate template) { InstantiateInlineWorkflowTemplateRequest request = @@ -670,7 +755,7 @@ public final OperationFuture instantiateInlineWorkflowT return instantiateInlineWorkflowTemplateAsync(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -695,31 +780,15 @@ public final OperationFuture instantiateInlineWorkflowT *

On successful completion, [Operation.response][google.longrunning.Operation.response] will * be [Empty][google.protobuf.Empty]. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(request).get();
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public final OperationFuture instantiateInlineWorkflowTemplateAsync( InstantiateInlineWorkflowTemplateRequest request) { return instantiateInlineWorkflowTemplateOperationCallable().futureCall(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -745,28 +814,13 @@ public final OperationFuture instantiateInlineWorkflowT * be [Empty][google.protobuf.Empty]. * *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   OperationFuture<Empty, WorkflowMetadata> future = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateOperationCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public final OperationCallable instantiateInlineWorkflowTemplateOperationCallable() { return stub.instantiateInlineWorkflowTemplateOperationCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Instantiates a template and begins execution. * @@ -792,299 +846,17 @@ public final OperationFuture instantiateInlineWorkflowT * be [Empty][google.protobuf.Empty]. * *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   InstantiateInlineWorkflowTemplateRequest request = InstantiateInlineWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   ApiFuture<Operation> future = workflowTemplateServiceClient.instantiateInlineWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
*/ public final UnaryCallable instantiateInlineWorkflowTemplateCallable() { return stub.instantiateInlineWorkflowTemplateCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Creates new workflow template. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
-   * }
-   * 
- * - * @param parent Required. The resource name of the region or location, as described in - * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.workflowTemplates,create`, the resource name of the region - * has the following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.workflowTemplates.create`, the resource name of the - * location has the following format: `projects/{project_id}/locations/{location}` - * @param template Required. The Dataproc workflow template to create. - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate createWorkflowTemplate( - RegionName parent, WorkflowTemplate template) { - CreateWorkflowTemplateRequest request = - CreateWorkflowTemplateRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) - .setTemplate(template) - .build(); - return createWorkflowTemplate(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Creates new workflow template. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
-   * }
-   * 
- * - * @param parent Required. The resource name of the region or location, as described in - * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.workflowTemplates,create`, the resource name of the region - * has the following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.workflowTemplates.create`, the resource name of the - * location has the following format: `projects/{project_id}/locations/{location}` - * @param template Required. The Dataproc workflow template to create. - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate createWorkflowTemplate( - LocationName parent, WorkflowTemplate template) { - CreateWorkflowTemplateRequest request = - CreateWorkflowTemplateRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) - .setTemplate(template) - .build(); - return createWorkflowTemplate(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Creates new workflow template. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent.toString(), template);
-   * }
-   * 
- * - * @param parent Required. The resource name of the region or location, as described in - * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.workflowTemplates,create`, the resource name of the region - * has the following format: `projects/{project_id}/regions/{region}` - *

* For `projects.locations.workflowTemplates.create`, the resource name of the - * location has the following format: `projects/{project_id}/locations/{location}` - * @param template Required. The Dataproc workflow template to create. - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate createWorkflowTemplate(String parent, WorkflowTemplate template) { - CreateWorkflowTemplateRequest request = - CreateWorkflowTemplateRequest.newBuilder().setParent(parent).setTemplate(template).build(); - return createWorkflowTemplate(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Creates new workflow template. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(request);
-   * }
-   * 
- * - * @param request The request object containing all of the parameters for the API call. - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate createWorkflowTemplate(CreateWorkflowTemplateRequest request) { - return createWorkflowTemplateCallable().call(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Creates new workflow template. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setTemplate(template)
-   *     .build();
-   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.createWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   WorkflowTemplate response = future.get();
-   * }
-   * 
- */ - public final UnaryCallable - createWorkflowTemplateCallable() { - return stub.createWorkflowTemplateCallable(); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Retrieves the latest workflow template. - * - *

Can retrieve previously instantiated template by specifying optional version parameter. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(name);
-   * }
-   * 
- * - * @param name Required. The resource name of the workflow template, as described in - * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.workflowTemplates.get`, the resource name of the template - * has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - *

* For `projects.locations.workflowTemplates.get`, the resource name of the template - * has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate getWorkflowTemplate(WorkflowTemplateName name) { - GetWorkflowTemplateRequest request = - GetWorkflowTemplateRequest.newBuilder() - .setName(name == null ? null : name.toString()) - .build(); - return getWorkflowTemplate(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Retrieves the latest workflow template. - * - *

Can retrieve previously instantiated template by specifying optional version parameter. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(name.toString());
-   * }
-   * 
- * - * @param name Required. The resource name of the workflow template, as described in - * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.workflowTemplates.get`, the resource name of the template - * has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - *

* For `projects.locations.workflowTemplates.get`, the resource name of the template - * has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate getWorkflowTemplate(String name) { - GetWorkflowTemplateRequest request = - GetWorkflowTemplateRequest.newBuilder().setName(name).build(); - return getWorkflowTemplate(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Retrieves the latest workflow template. - * - *

Can retrieve previously instantiated template by specifying optional version parameter. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   GetWorkflowTemplateRequest request = GetWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.getWorkflowTemplate(request);
-   * }
-   * 
- * - * @param request The request object containing all of the parameters for the API call. - * @throws com.google.api.gax.rpc.ApiException if the remote call fails - */ - public final WorkflowTemplate getWorkflowTemplate(GetWorkflowTemplateRequest request) { - return getWorkflowTemplateCallable().call(request); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD - /** - * Retrieves the latest workflow template. - * - *

Can retrieve previously instantiated template by specifying optional version parameter. - * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   GetWorkflowTemplateRequest request = GetWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.getWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   WorkflowTemplate response = future.get();
-   * }
-   * 
- */ - public final UnaryCallable - getWorkflowTemplateCallable() { - return stub.getWorkflowTemplateCallable(); - } - - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) workflow template. The updated template must contain version that matches * the current server version. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.updateWorkflowTemplate(template);
-   * }
-   * 
- * * @param template Required. The updated workflow template. *

The `template.version` field must match the current version. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -1095,23 +867,11 @@ public final WorkflowTemplate updateWorkflowTemplate(WorkflowTemplate template) return updateWorkflowTemplate(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) workflow template. The updated template must contain version that matches * the current server version. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   UpdateWorkflowTemplateRequest request = UpdateWorkflowTemplateRequest.newBuilder()
-   *     .setTemplate(template)
-   *     .build();
-   *   WorkflowTemplate response = workflowTemplateServiceClient.updateWorkflowTemplate(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1119,54 +879,36 @@ public final WorkflowTemplate updateWorkflowTemplate(UpdateWorkflowTemplateReque return updateWorkflowTemplateCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Updates (replaces) workflow template. The updated template must contain version that matches * the current server version. * *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
-   *   UpdateWorkflowTemplateRequest request = UpdateWorkflowTemplateRequest.newBuilder()
-   *     .setTemplate(template)
-   *     .build();
-   *   ApiFuture<WorkflowTemplate> future = workflowTemplateServiceClient.updateWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   WorkflowTemplate response = future.get();
-   * }
-   * 
*/ public final UnaryCallable updateWorkflowTemplateCallable() { return stub.updateWorkflowTemplateCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates,list`, the resource name of the region has
- *     the following format: `projects/{project_id}/regions/{region}`
- *     * For `projects.locations.workflowTemplates.list`, the resource name of the location
- *     has the following format: `projects/{project_id}/locations/{location}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.list`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.list`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
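A minimal sketch of paging through results, mirroring the removed sample:

    try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      for (WorkflowTemplate element :
          workflowTemplateServiceClient.listWorkflowTemplates(parent).iterateAll()) {
        // doThingsWith(element);
      }
    }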
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(RegionName parent) { + public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(LocationName parent) { ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -1174,30 +916,24 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(RegionName return listWorkflowTemplates(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates,list`, the resource name of the region has
- *     the following format: `projects/{project_id}/regions/{region}`
- *     * For `projects.locations.workflowTemplates.list`, the resource name of the location
- *     has the following format: `projects/{project_id}/locations/{location}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.list`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.list`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(LocationName parent) { + public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(RegionName parent) { ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) @@ -1205,27 +941,21 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(LocationNa return listWorkflowTemplates(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(parent.toString()).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param parent Required. The resource name of the region or location, as described in * https://cloud.google.com/apis/design/resource_names. - *

- *     * For `projects.regions.workflowTemplates,list`, the resource name of the region has
- *     the following format: `projects/{project_id}/regions/{region}`
- *     * For `projects.locations.workflowTemplates.list`, the resource name of the location
- *     has the following format: `projects/{project_id}/locations/{location}`
+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.list`, the resource name of the region has
+ *           the following format: `projects/{project_id}/regions/{region}`
+ *       <li>For `projects.locations.workflowTemplates.list`, the resource name of the location
+ *           has the following format: `projects/{project_id}/locations/{location}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(String parent) { @@ -1234,24 +964,10 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates(String par return listWorkflowTemplates(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   for (WorkflowTemplate element : workflowTemplateServiceClient.listWorkflowTemplates(request).iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1260,84 +976,45 @@ public final ListWorkflowTemplatesPagedResponse listWorkflowTemplates( return listWorkflowTemplatesPagedCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   ApiFuture<ListWorkflowTemplatesPagedResponse> future = workflowTemplateServiceClient.listWorkflowTemplatesPagedCallable().futureCall(request);
-   *   // Do something
-   *   for (WorkflowTemplate element : future.get().iterateAll()) {
-   *     // doThingsWith(element);
-   *   }
-   * }
-   * 
*/ public final UnaryCallable listWorkflowTemplatesPagedCallable() { return stub.listWorkflowTemplatesPagedCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists workflows that match the specified filter in the request. * *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
-   *   ListWorkflowTemplatesRequest request = ListWorkflowTemplatesRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .build();
-   *   while (true) {
-   *     ListWorkflowTemplatesResponse response = workflowTemplateServiceClient.listWorkflowTemplatesCallable().call(request);
-   *     for (WorkflowTemplate element : response.getTemplatesList()) {
-   *       // doThingsWith(element);
-   *     }
-   *     String nextPageToken = response.getNextPageToken();
-   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
-   *       request = request.toBuilder().setPageToken(nextPageToken).build();
-   *     } else {
-   *       break;
-   *     }
-   *   }
-   * }
-   * 
*/ public final UnaryCallable listWorkflowTemplatesCallable() { return stub.listWorkflowTemplatesCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a workflow template. It does not cancel in-progress workflows. * - *

Sample code: - * - *


-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.deleteWorkflowTemplate(name);
-   * }
-   * </code></pre>
- * * @param name Required. The resource name of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.workflowTemplates.delete`, the resource name of the template - * has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - *

* For `projects.locations.workflowTemplates.instantiate`, the resource name of the - * template has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + *

+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.delete`, the resource name of the template
+ *           has the following format:
+ *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+ *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteWorkflowTemplate(WorkflowTemplateName name) { @@ -1348,27 +1025,23 @@ public final void deleteWorkflowTemplate(WorkflowTemplateName name) { deleteWorkflowTemplate(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a workflow template. It does not cancel in-progress workflows. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   workflowTemplateServiceClient.deleteWorkflowTemplate(name.toString());
-   * }
-   * </code></pre>
- * * @param name Required. The resource name of the workflow template, as described in * https://cloud.google.com/apis/design/resource_names. - *

* For `projects.regions.workflowTemplates.delete`, the resource name of the template - * has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - *

* For `projects.locations.workflowTemplates.instantiate`, the resource name of the - * template has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + *

+ *     <ul>
+ *       <li>For `projects.regions.workflowTemplates.delete`, the resource name of the template
+ *           has the following format:
+ *           `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`
+ *       <li>For `projects.locations.workflowTemplates.instantiate`, the resource name of the
+ *           template has the following format:
+ *           `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`
+ *     </ul>
+ * * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteWorkflowTemplate(String name) { @@ -1377,22 +1050,10 @@ public final void deleteWorkflowTemplate(String name) { deleteWorkflowTemplate(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a workflow template. It does not cancel in-progress workflows. * - *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   DeleteWorkflowTemplateRequest request = DeleteWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   workflowTemplateServiceClient.deleteWorkflowTemplate(request);
-   * }
-   * </code></pre>
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -1400,23 +1061,11 @@ public final void deleteWorkflowTemplate(DeleteWorkflowTemplateRequest request) deleteWorkflowTemplateCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes a workflow template. It does not cancel in-progress workflows. * *

-   * <p>Sample code:
-   *
-   * <pre><code>
-   * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
-   *   WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName("[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-   *   DeleteWorkflowTemplateRequest request = DeleteWorkflowTemplateRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<Void> future = workflowTemplateServiceClient.deleteWorkflowTemplateCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * </code></pre>
*/ public final UnaryCallable deleteWorkflowTemplateCallable() { diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceSettings.java index 1cdc2e87..0d0f9e3c 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import static com.google.cloud.dataproc.v1beta2.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; @@ -36,7 +37,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link WorkflowTemplateServiceClient}. * @@ -53,24 +54,39 @@ * *

 * <p>For example, to set the total timeout of createWorkflowTemplate to 30 seconds:
 *

- * <pre>
- * <code>
+ * <pre>{@code
  * WorkflowTemplateServiceSettings.Builder workflowTemplateServiceSettingsBuilder =
  *     WorkflowTemplateServiceSettings.newBuilder();
  * workflowTemplateServiceSettingsBuilder
  *     .createWorkflowTemplateSettings()
  *     .setRetrySettings(
- *         workflowTemplateServiceSettingsBuilder.createWorkflowTemplateSettings().getRetrySettings().toBuilder()
+ *         workflowTemplateServiceSettingsBuilder
+ *             .createWorkflowTemplateSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * WorkflowTemplateServiceSettings workflowTemplateServiceSettings = workflowTemplateServiceSettingsBuilder.build();
- * </code>
- * </pre>
+ * WorkflowTemplateServiceSettings workflowTemplateServiceSettings =
+ *     workflowTemplateServiceSettingsBuilder.build();
+ * }</pre>
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class WorkflowTemplateServiceSettings extends ClientSettings { + + /** Returns the object with the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings + createWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()) + .createWorkflowTemplateSettings(); + } + + /** Returns the object with the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings + getWorkflowTemplateSettings() { + return ((WorkflowTemplateServiceStubSettings) getStubSettings()).getWorkflowTemplateSettings(); + } + /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ public UnaryCallSettings instantiateWorkflowTemplateSettings() { @@ -79,8 +95,6 @@ public class WorkflowTemplateServiceSettings } /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings instantiateWorkflowTemplateOperationSettings() { return ((WorkflowTemplateServiceStubSettings) getStubSettings()) @@ -95,27 +109,12 @@ public class WorkflowTemplateServiceSettings } /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings instantiateInlineWorkflowTemplateOperationSettings() { return ((WorkflowTemplateServiceStubSettings) getStubSettings()) .instantiateInlineWorkflowTemplateOperationSettings(); } - /** Returns the object with the settings used for calls to createWorkflowTemplate. */ - public UnaryCallSettings - createWorkflowTemplateSettings() { - return ((WorkflowTemplateServiceStubSettings) getStubSettings()) - .createWorkflowTemplateSettings(); - } - - /** Returns the object with the settings used for calls to getWorkflowTemplate. */ - public UnaryCallSettings - getWorkflowTemplateSettings() { - return ((WorkflowTemplateServiceStubSettings) getStubSettings()).getWorkflowTemplateSettings(); - } - /** Returns the object with the settings used for calls to updateWorkflowTemplate. */ public UnaryCallSettings updateWorkflowTemplateSettings() { @@ -200,18 +199,15 @@ protected WorkflowTemplateServiceSettings(Builder settingsBuilder) throws IOExce /** Builder for WorkflowTemplateServiceSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(WorkflowTemplateServiceStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(WorkflowTemplateServiceStubSettings.newBuilder()); - } - protected Builder(WorkflowTemplateServiceSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -220,11 +216,15 @@ protected Builder(WorkflowTemplateServiceStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(WorkflowTemplateServiceStubSettings.newBuilder()); + } + public WorkflowTemplateServiceStubSettings.Builder getStubSettingsBuilder() { return ((WorkflowTemplateServiceStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. 
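For orientation, a minimal sketch of how the updater hook documented just below might be used, assuming gax's ApiFunction-based signature; the variable names are illustrative and the 30-second timeout mirrors the class-level sample above:

    WorkflowTemplateServiceSettings.Builder builder = WorkflowTemplateServiceSettings.newBuilder();
    // The updater runs once per unary method; returning null satisfies the
    // ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> contract. Note that
    // applyToAllUnaryMethods still declares 'throws Exception' (see the
    // NEXT_MAJOR_VER note above), so callers must handle or propagate it.
    builder.applyToAllUnaryMethods(
        callSettings -> {
          callSettings.setRetrySettings(
              callSettings.getRetrySettings()
                  .toBuilder()
                  .setTotalTimeout(Duration.ofSeconds(30))
                  .build());
          return null;
        });
    WorkflowTemplateServiceSettings settings = builder.build();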
/** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -237,6 +237,18 @@ public Builder applyToAllUnaryMethods( return this; } + /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings.Builder + createWorkflowTemplateSettings() { + return getStubSettingsBuilder().createWorkflowTemplateSettings(); + } + + /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings.Builder + getWorkflowTemplateSettings() { + return getStubSettingsBuilder().getWorkflowTemplateSettings(); + } + /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ public UnaryCallSettings.Builder instantiateWorkflowTemplateSettings() { @@ -244,8 +256,6 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings.Builder< InstantiateWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateWorkflowTemplateOperationSettings() { @@ -259,26 +269,12 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to instantiateInlineWorkflowTemplate. */ - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") public OperationCallSettings.Builder< InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateOperationSettings() { return getStubSettingsBuilder().instantiateInlineWorkflowTemplateOperationSettings(); } - /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ - public UnaryCallSettings.Builder - createWorkflowTemplateSettings() { - return getStubSettingsBuilder().createWorkflowTemplateSettings(); - } - - /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ - public UnaryCallSettings.Builder - getWorkflowTemplateSettings() { - return getStubSettingsBuilder().getWorkflowTemplateSettings(); - } - /** Returns the builder for the settings used for calls to updateWorkflowTemplate. */ public UnaryCallSettings.Builder updateWorkflowTemplateSettings() { diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/package-info.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/package-info.java index 76ab2821..4fc507e1 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/package-info.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,79 +15,35 @@ */ /** - * A client to Cloud Dataproc API. + * The interfaces provided are listed below, along with usage samples. * - *

- * <p>The interfaces provided are listed below, along with usage samples.
- *
- * <p>============================== AutoscalingPolicyServiceClient ==============================
+ * <p>======================= AutoscalingPolicyServiceClient =======================
 *

 * <p>Service Description: The API interface for managing autoscaling policies in the Cloud Dataproc
 * API.
 *

 * <p>Sample for AutoscalingPolicyServiceClient:
 *

- * <pre>
- * <code>
- * try (AutoscalingPolicyServiceClient autoscalingPolicyServiceClient = AutoscalingPolicyServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
- *   AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
- *   AutoscalingPolicy response = autoscalingPolicyServiceClient.createAutoscalingPolicy(parent, policy);
- * }
- * 
- * </code>
- * </pre>
- *
- * ======================= ClusterControllerClient =======================
+ * <p>======================= ClusterControllerClient =======================
 *

 * <p>Service Description: The ClusterControllerService provides methods to manage clusters of
 * Compute Engine instances.
 *

 * <p>Sample for ClusterControllerClient:
 *

- * <pre>
- * <code>
- * try (ClusterControllerClient clusterControllerClient = ClusterControllerClient.create()) {
- *   String projectId = "";
- *   String region = "";
- *   String clusterName = "";
- *   Cluster response = clusterControllerClient.getCluster(projectId, region, clusterName);
- * }
- * 
- * </code>
- * </pre>
- *
- * =================== JobControllerClient ===================
+ * <p>======================= JobControllerClient =======================
 *

 * <p>Service Description: The JobController provides methods to manage jobs.
 *

 * <p>Sample for JobControllerClient:
 *

- * <pre>
- * <code>
- * try (JobControllerClient jobControllerClient = JobControllerClient.create()) {
- *   String projectId = "";
- *   String region = "";
- *   Job job = Job.newBuilder().build();
- *   Job response = jobControllerClient.submitJob(projectId, region, job);
- * }
- * 
- * </code>
- * </pre>
- *
- * ============================= WorkflowTemplateServiceClient =============================
+ * <p>======================= WorkflowTemplateServiceClient =======================
 *

 * <p>Service Description: The API interface for managing Workflow Templates in the Dataproc API.
 *

 * <p>Sample for WorkflowTemplateServiceClient:
 *

- * <pre>
- * <code>
- * try (WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient.create()) {
- *   RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
- *   WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
- *   WorkflowTemplate response = workflowTemplateServiceClient.createWorkflowTemplate(parent, template);
- * }
- * 
- * </code>
- * </pre>
*/ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.dataproc.v1beta2; import javax.annotation.Generated; diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/AutoscalingPolicyServiceStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/AutoscalingPolicyServiceStub.java index d47ffbe7..454054a2 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/AutoscalingPolicyServiceStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/AutoscalingPolicyServiceStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dataproc.v1beta2.AutoscalingPolicy; @@ -30,14 +30,13 @@ import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for Cloud Dataproc API. + * Base stub class for the AutoscalingPolicyService service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class AutoscalingPolicyServiceStub implements BackgroundResource { public UnaryCallable diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/AutoscalingPolicyServiceStubSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/AutoscalingPolicyServiceStubSettings.java index 03020574..fd716b84 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/AutoscalingPolicyServiceStubSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/AutoscalingPolicyServiceStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; @@ -56,7 +57,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link AutoscalingPolicyServiceStub}. * @@ -73,22 +74,24 @@ * *

 * <p>For example, to set the total timeout of createAutoscalingPolicy to 30 seconds:
 *

- * <pre>
- * <code>
+ * <pre>{@code
  * AutoscalingPolicyServiceStubSettings.Builder autoscalingPolicyServiceSettingsBuilder =
  *     AutoscalingPolicyServiceStubSettings.newBuilder();
  * autoscalingPolicyServiceSettingsBuilder
  *     .createAutoscalingPolicySettings()
  *     .setRetrySettings(
- *         autoscalingPolicyServiceSettingsBuilder.createAutoscalingPolicySettings().getRetrySettings().toBuilder()
+ *         autoscalingPolicyServiceSettingsBuilder
+ *             .createAutoscalingPolicySettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * AutoscalingPolicyServiceStubSettings autoscalingPolicyServiceSettings = autoscalingPolicyServiceSettingsBuilder.build();
- * </code>
- * </pre>
+ * AutoscalingPolicyServiceStubSettings autoscalingPolicyServiceSettings =
+ *     autoscalingPolicyServiceSettingsBuilder.build();
+ * }</pre>
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class AutoscalingPolicyServiceStubSettings extends StubSettings { /** The default scopes of the service. */ @@ -109,6 +112,78 @@ public class AutoscalingPolicyServiceStubSettings private final UnaryCallSettings deleteAutoscalingPolicySettings; + private static final PagedListDescriptor< + ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse, AutoscalingPolicy> + LIST_AUTOSCALING_POLICIES_PAGE_STR_DESC = + new PagedListDescriptor< + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + AutoscalingPolicy>() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListAutoscalingPoliciesRequest injectToken( + ListAutoscalingPoliciesRequest payload, String token) { + return ListAutoscalingPoliciesRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListAutoscalingPoliciesRequest injectPageSize( + ListAutoscalingPoliciesRequest payload, int pageSize) { + return ListAutoscalingPoliciesRequest.newBuilder(payload) + .setPageSize(pageSize) + .build(); + } + + @Override + public Integer extractPageSize(ListAutoscalingPoliciesRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListAutoscalingPoliciesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListAutoscalingPoliciesResponse payload) { + return payload.getPoliciesList() == null + ? ImmutableList.of() + : payload.getPoliciesList(); + } + }; + + private static final PagedListResponseFactory< + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + ListAutoscalingPoliciesPagedResponse> + LIST_AUTOSCALING_POLICIES_PAGE_STR_FACT = + new PagedListResponseFactory< + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + ListAutoscalingPoliciesPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable + callable, + ListAutoscalingPoliciesRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext< + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + AutoscalingPolicy> + pageContext = + PageContext.create( + callable, LIST_AUTOSCALING_POLICIES_PAGE_STR_DESC, request, context); + return ListAutoscalingPoliciesPagedResponse.createAsync(pageContext, futureResponse); + } + }; + /** Returns the object with the settings used for calls to createAutoscalingPolicy. */ public UnaryCallSettings createAutoscalingPolicySettings() { @@ -148,10 +223,10 @@ public AutoscalingPolicyServiceStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcAutoscalingPolicyServiceStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. 
*/ @@ -218,83 +293,10 @@ protected AutoscalingPolicyServiceStubSettings(Builder settingsBuilder) throws I deleteAutoscalingPolicySettings = settingsBuilder.deleteAutoscalingPolicySettings().build(); } - private static final PagedListDescriptor< - ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse, AutoscalingPolicy> - LIST_AUTOSCALING_POLICIES_PAGE_STR_DESC = - new PagedListDescriptor< - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - AutoscalingPolicy>() { - @Override - public String emptyToken() { - return ""; - } - - @Override - public ListAutoscalingPoliciesRequest injectToken( - ListAutoscalingPoliciesRequest payload, String token) { - return ListAutoscalingPoliciesRequest.newBuilder(payload).setPageToken(token).build(); - } - - @Override - public ListAutoscalingPoliciesRequest injectPageSize( - ListAutoscalingPoliciesRequest payload, int pageSize) { - return ListAutoscalingPoliciesRequest.newBuilder(payload) - .setPageSize(pageSize) - .build(); - } - - @Override - public Integer extractPageSize(ListAutoscalingPoliciesRequest payload) { - return payload.getPageSize(); - } - - @Override - public String extractNextToken(ListAutoscalingPoliciesResponse payload) { - return payload.getNextPageToken(); - } - - @Override - public Iterable extractResources( - ListAutoscalingPoliciesResponse payload) { - return payload.getPoliciesList() != null - ? payload.getPoliciesList() - : ImmutableList.of(); - } - }; - - private static final PagedListResponseFactory< - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - ListAutoscalingPoliciesPagedResponse> - LIST_AUTOSCALING_POLICIES_PAGE_STR_FACT = - new PagedListResponseFactory< - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - ListAutoscalingPoliciesPagedResponse>() { - @Override - public ApiFuture getFuturePagedResponse( - UnaryCallable - callable, - ListAutoscalingPoliciesRequest request, - ApiCallContext context, - ApiFuture futureResponse) { - PageContext< - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - AutoscalingPolicy> - pageContext = - PageContext.create( - callable, LIST_AUTOSCALING_POLICIES_PAGE_STR_DESC, request, context); - return ListAutoscalingPoliciesPagedResponse.createAsync(pageContext, futureResponse); - } - }; - /** Builder for AutoscalingPolicyServiceStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createAutoscalingPolicySettings; private final UnaryCallSettings.Builder @@ -308,51 +310,19 @@ public static class Builder listAutoscalingPoliciesSettings; private final UnaryCallSettings.Builder deleteAutoscalingPolicySettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "no_retry_0_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( "retry_policy_1_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_4_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_6_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_3_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.INTERNAL, - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "no_retry_1_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_5_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.INTERNAL, - StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_7_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.INTERNAL, - StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -363,110 +333,38 @@ public static class Builder RetrySettings settings = null; settings = RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_1_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_6_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(300000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(300000L)) - .setTotalTimeout(Duration.ofMillis(300000L)) - .build(); - definitions.put("retry_policy_2_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(300000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(300000L)) - 
.setTotalTimeout(Duration.ofMillis(300000L)) - .build(); - definitions.put("retry_policy_3_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) .setInitialRpcTimeout(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeout(Duration.ofMillis(600000L)) .setTotalTimeout(Duration.ofMillis(600000L)) .build(); - definitions.put("retry_policy_7_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(900000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(900000L)) - .setTotalTimeout(Duration.ofMillis(900000L)) - .build(); - definitions.put("retry_policy_5_params", settings); + definitions.put("no_retry_0_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.ofMillis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(900000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(900000L)) - .setTotalTimeout(Duration.ofMillis(900000L)) - .build(); - definitions.put("retry_policy_4_params", settings); - settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build(); - definitions.put("no_retry_params", settings); - settings = - RetrySettings.newBuilder() .setInitialRpcTimeout(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeout(Duration.ofMillis(600000L)) .setTotalTimeout(Duration.ofMillis(600000L)) .build(); - definitions.put("no_retry_1_params", settings); + definitions.put("retry_policy_1_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); createAutoscalingPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - updateAutoscalingPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - getAutoscalingPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - listAutoscalingPoliciesSettings = PagedCallSettings.newBuilder(LIST_AUTOSCALING_POLICIES_PAGE_STR_FACT); - deleteAutoscalingPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = @@ -476,25 +374,43 @@ protected Builder(ClientContext clientContext) { getAutoscalingPolicySettings, listAutoscalingPoliciesSettings, deleteAutoscalingPolicySettings); - initDefaults(this); } + protected Builder(AutoscalingPolicyServiceStubSettings settings) { + super(settings); + + createAutoscalingPolicySettings = settings.createAutoscalingPolicySettings.toBuilder(); + updateAutoscalingPolicySettings = settings.updateAutoscalingPolicySettings.toBuilder(); + getAutoscalingPolicySettings = settings.getAutoscalingPolicySettings.toBuilder(); + listAutoscalingPoliciesSettings = settings.listAutoscalingPoliciesSettings.toBuilder(); + deleteAutoscalingPolicySettings = settings.deleteAutoscalingPolicySettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createAutoscalingPolicySettings, + updateAutoscalingPolicySettings, + getAutoscalingPolicySettings, + listAutoscalingPoliciesSettings, + deleteAutoscalingPolicySettings); + } + private static Builder createDefault() { - Builder builder = new 
Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createAutoscalingPolicySettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params")); builder .updateAutoscalingPolicySettings() @@ -513,31 +429,13 @@ private static Builder initDefaults(Builder builder) { builder .deleteAutoscalingPolicySettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params")); return builder; } - protected Builder(AutoscalingPolicyServiceStubSettings settings) { - super(settings); - - createAutoscalingPolicySettings = settings.createAutoscalingPolicySettings.toBuilder(); - updateAutoscalingPolicySettings = settings.updateAutoscalingPolicySettings.toBuilder(); - getAutoscalingPolicySettings = settings.getAutoscalingPolicySettings.toBuilder(); - listAutoscalingPoliciesSettings = settings.listAutoscalingPoliciesSettings.toBuilder(); - deleteAutoscalingPolicySettings = settings.deleteAutoscalingPolicySettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createAutoscalingPolicySettings, - updateAutoscalingPolicySettings, - getAutoscalingPolicySettings, - listAutoscalingPoliciesSettings, - deleteAutoscalingPolicySettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStub.java index 0981902f..849645b3 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.ClusterControllerClient.ListClustersPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -35,22 +35,19 @@ import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for Cloud Dataproc API. + * Base stub class for the ClusterController service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class ClusterControllerStub implements BackgroundResource { - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationsStub getOperationsStub() { throw new UnsupportedOperationException("Not implemented: getOperationsStub()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable createClusterOperationCallable() { throw new UnsupportedOperationException("Not implemented: createClusterOperationCallable()"); @@ -60,7 +57,6 @@ public UnaryCallable createClusterCallable() { throw new UnsupportedOperationException("Not implemented: createClusterCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable updateClusterOperationCallable() { throw new UnsupportedOperationException("Not implemented: updateClusterOperationCallable()"); @@ -70,7 +66,6 @@ public UnaryCallable updateClusterCallable() { throw new UnsupportedOperationException("Not implemented: updateClusterCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable deleteClusterOperationCallable() { throw new UnsupportedOperationException("Not implemented: deleteClusterOperationCallable()"); @@ -80,16 +75,6 @@ public UnaryCallable deleteClusterCallable() { throw new UnsupportedOperationException("Not implemented: deleteClusterCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallable - diagnoseClusterOperationCallable() { - throw new UnsupportedOperationException("Not implemented: diagnoseClusterOperationCallable()"); - } - - public UnaryCallable diagnoseClusterCallable() { - throw new UnsupportedOperationException("Not implemented: diagnoseClusterCallable()"); - } - public UnaryCallable getClusterCallable() { throw new UnsupportedOperationException("Not implemented: getClusterCallable()"); } @@ -102,6 +87,15 @@ public UnaryCallable listClustersCall throw new UnsupportedOperationException("Not implemented: listClustersCallable()"); } + public OperationCallable + diagnoseClusterOperationCallable() { + throw new UnsupportedOperationException("Not implemented: diagnoseClusterOperationCallable()"); + } + + public UnaryCallable diagnoseClusterCallable() { + throw new UnsupportedOperationException("Not implemented: diagnoseClusterCallable()"); + } + @Override public abstract void close(); } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStubSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStubSettings.java index 4109126d..7857251c 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStubSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/ClusterControllerStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.ClusterControllerClient.ListClustersPagedResponse; @@ -63,7 +64,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link ClusterControllerStub}. * @@ -80,22 +81,24 @@ * *

 * <p>For example, to set the total timeout of getCluster to 30 seconds:
 *

- * <pre>
- * <code>
+ * <pre>{@code
  * ClusterControllerStubSettings.Builder clusterControllerSettingsBuilder =
  *     ClusterControllerStubSettings.newBuilder();
  * clusterControllerSettingsBuilder
  *     .getClusterSettings()
  *     .setRetrySettings(
- *         clusterControllerSettingsBuilder.getClusterSettings().getRetrySettings().toBuilder()
+ *         clusterControllerSettingsBuilder
+ *             .getClusterSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * ClusterControllerStubSettings clusterControllerSettings = clusterControllerSettingsBuilder.build();
- * </code>
- * </pre>
+ * ClusterControllerStubSettings clusterControllerSettings =
+ *     clusterControllerSettingsBuilder.build();
+ * }</pre>
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class ClusterControllerStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -110,13 +113,66 @@ public class ClusterControllerStubSettings extends StubSettings deleteClusterSettings; private final OperationCallSettings deleteClusterOperationSettings; - private final UnaryCallSettings diagnoseClusterSettings; - private final OperationCallSettings - diagnoseClusterOperationSettings; private final UnaryCallSettings getClusterSettings; private final PagedCallSettings< ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> listClustersSettings; + private final UnaryCallSettings diagnoseClusterSettings; + private final OperationCallSettings + diagnoseClusterOperationSettings; + + private static final PagedListDescriptor + LIST_CLUSTERS_PAGE_STR_DESC = + new PagedListDescriptor() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListClustersRequest injectToken(ListClustersRequest payload, String token) { + return ListClustersRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListClustersRequest injectPageSize(ListClustersRequest payload, int pageSize) { + return ListClustersRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListClustersRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListClustersResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListClustersResponse payload) { + return payload.getClustersList() == null + ? ImmutableList.of() + : payload.getClustersList(); + } + }; + + private static final PagedListResponseFactory< + ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> + LIST_CLUSTERS_PAGE_STR_FACT = + new PagedListResponseFactory< + ListClustersRequest, ListClustersResponse, ListClustersPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListClustersRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext pageContext = + PageContext.create(callable, LIST_CLUSTERS_PAGE_STR_DESC, request, context); + return ListClustersPagedResponse.createAsync(pageContext, futureResponse); + } + }; /** Returns the object with the settings used for calls to createCluster. */ public UnaryCallSettings createClusterSettings() { @@ -124,7 +180,6 @@ public UnaryCallSettings createClusterSettings( } /** Returns the object with the settings used for calls to createCluster. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings createClusterOperationSettings() { return createClusterOperationSettings; @@ -136,7 +191,6 @@ public UnaryCallSettings updateClusterSettings( } /** Returns the object with the settings used for calls to updateCluster. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings updateClusterOperationSettings() { return updateClusterOperationSettings; @@ -148,24 +202,11 @@ public UnaryCallSettings deleteClusterSettings( } /** Returns the object with the settings used for calls to deleteCluster. 
*/ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings deleteClusterOperationSettings() { return deleteClusterOperationSettings; } - /** Returns the object with the settings used for calls to diagnoseCluster. */ - public UnaryCallSettings diagnoseClusterSettings() { - return diagnoseClusterSettings; - } - - /** Returns the object with the settings used for calls to diagnoseCluster. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallSettings - diagnoseClusterOperationSettings() { - return diagnoseClusterOperationSettings; - } - /** Returns the object with the settings used for calls to getCluster. */ public UnaryCallSettings getClusterSettings() { return getClusterSettings; @@ -177,16 +218,27 @@ public UnaryCallSettings getClusterSettings() { return listClustersSettings; } + /** Returns the object with the settings used for calls to diagnoseCluster. */ + public UnaryCallSettings diagnoseClusterSettings() { + return diagnoseClusterSettings; + } + + /** Returns the object with the settings used for calls to diagnoseCluster. */ + public OperationCallSettings + diagnoseClusterOperationSettings() { + return diagnoseClusterOperationSettings; + } + @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public ClusterControllerStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcClusterControllerStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. 
*/ @@ -252,69 +304,15 @@ protected ClusterControllerStubSettings(Builder settingsBuilder) throws IOExcept updateClusterOperationSettings = settingsBuilder.updateClusterOperationSettings().build(); deleteClusterSettings = settingsBuilder.deleteClusterSettings().build(); deleteClusterOperationSettings = settingsBuilder.deleteClusterOperationSettings().build(); - diagnoseClusterSettings = settingsBuilder.diagnoseClusterSettings().build(); - diagnoseClusterOperationSettings = settingsBuilder.diagnoseClusterOperationSettings().build(); getClusterSettings = settingsBuilder.getClusterSettings().build(); listClustersSettings = settingsBuilder.listClustersSettings().build(); + diagnoseClusterSettings = settingsBuilder.diagnoseClusterSettings().build(); + diagnoseClusterOperationSettings = settingsBuilder.diagnoseClusterOperationSettings().build(); } - private static final PagedListDescriptor - LIST_CLUSTERS_PAGE_STR_DESC = - new PagedListDescriptor() { - @Override - public String emptyToken() { - return ""; - } - - @Override - public ListClustersRequest injectToken(ListClustersRequest payload, String token) { - return ListClustersRequest.newBuilder(payload).setPageToken(token).build(); - } - - @Override - public ListClustersRequest injectPageSize(ListClustersRequest payload, int pageSize) { - return ListClustersRequest.newBuilder(payload).setPageSize(pageSize).build(); - } - - @Override - public Integer extractPageSize(ListClustersRequest payload) { - return payload.getPageSize(); - } - - @Override - public String extractNextToken(ListClustersResponse payload) { - return payload.getNextPageToken(); - } - - @Override - public Iterable extractResources(ListClustersResponse payload) { - return payload.getClustersList() != null - ? payload.getClustersList() - : ImmutableList.of(); - } - }; - - private static final PagedListResponseFactory< - ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> - LIST_CLUSTERS_PAGE_STR_FACT = - new PagedListResponseFactory< - ListClustersRequest, ListClustersResponse, ListClustersPagedResponse>() { - @Override - public ApiFuture getFuturePagedResponse( - UnaryCallable callable, - ListClustersRequest request, - ApiCallContext context, - ApiFuture futureResponse) { - PageContext pageContext = - PageContext.create(callable, LIST_CLUSTERS_PAGE_STR_DESC, request, context); - return ListClustersPagedResponse.createAsync(pageContext, futureResponse); - } - }; - /** Builder for ClusterControllerStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createClusterSettings; private final OperationCallSettings.Builder< CreateClusterRequest, Cluster, ClusterOperationMetadata> @@ -327,23 +325,24 @@ public static class Builder extends StubSettings.Builder deleteClusterOperationSettings; + private final UnaryCallSettings.Builder getClusterSettings; + private final PagedCallSettings.Builder< + ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> + listClustersSettings; private final UnaryCallSettings.Builder diagnoseClusterSettings; private final OperationCallSettings.Builder< DiagnoseClusterRequest, Empty, ClusterOperationMetadata> diagnoseClusterOperationSettings; - private final UnaryCallSettings.Builder getClusterSettings; - private final PagedCallSettings.Builder< - ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> - listClustersSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder> definitions = ImmutableMap.builder(); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); + definitions.put( + "retry_policy_2_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( "retry_policy_3_codes", ImmutableSet.copyOf( @@ -351,9 +350,6 @@ public static class Builder extends StubSettings.BuildernewArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -384,61 +380,74 @@ public static class Builder extends StubSettings.Builder>of( + createClusterSettings, + updateClusterSettings, + deleteClusterSettings, + getClusterSettings, + listClustersSettings, + diagnoseClusterSettings); + initDefaults(this); + } - listClustersSettings = PagedCallSettings.newBuilder(LIST_CLUSTERS_PAGE_STR_FACT); + protected Builder(ClusterControllerStubSettings settings) { + super(settings); + + createClusterSettings = settings.createClusterSettings.toBuilder(); + createClusterOperationSettings = settings.createClusterOperationSettings.toBuilder(); + updateClusterSettings = settings.updateClusterSettings.toBuilder(); + updateClusterOperationSettings = settings.updateClusterOperationSettings.toBuilder(); + deleteClusterSettings = settings.deleteClusterSettings.toBuilder(); + deleteClusterOperationSettings = settings.deleteClusterOperationSettings.toBuilder(); + getClusterSettings = settings.getClusterSettings.toBuilder(); + listClustersSettings = settings.listClustersSettings.toBuilder(); + diagnoseClusterSettings = settings.diagnoseClusterSettings.toBuilder(); + diagnoseClusterOperationSettings = settings.diagnoseClusterOperationSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.>of( createClusterSettings, updateClusterSettings, deleteClusterSettings, - diagnoseClusterSettings, getClusterSettings, - listClustersSettings); - - initDefaults(this); + listClustersSettings, + diagnoseClusterSettings); } private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - 
builder .createClusterSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) @@ -454,11 +463,6 @@ private static Builder initDefaults(Builder builder) { .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); - builder - .diagnoseClusterSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); - builder .getClusterSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) @@ -468,6 +472,12 @@ private static Builder initDefaults(Builder builder) { .listClustersSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + + builder + .diagnoseClusterSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + builder .createClusterOperationSettings() .setInitialCallSettings( @@ -486,11 +496,12 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(900000L)) .build())); + builder .updateClusterOperationSettings() .setInitialCallSettings( @@ -509,11 +520,12 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(900000L)) .build())); + builder .deleteClusterOperationSettings() .setInitialCallSettings( @@ -532,11 +544,12 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(900000L)) .build())); + builder .diagnoseClusterOperationSettings() .setInitialCallSettings( @@ -555,40 +568,16 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(30000L)) .build())); return builder; } - protected Builder(ClusterControllerStubSettings settings) { - super(settings); - - createClusterSettings = settings.createClusterSettings.toBuilder(); - 
createClusterOperationSettings = settings.createClusterOperationSettings.toBuilder(); - updateClusterSettings = settings.updateClusterSettings.toBuilder(); - updateClusterOperationSettings = settings.updateClusterOperationSettings.toBuilder(); - deleteClusterSettings = settings.deleteClusterSettings.toBuilder(); - deleteClusterOperationSettings = settings.deleteClusterOperationSettings.toBuilder(); - diagnoseClusterSettings = settings.diagnoseClusterSettings.toBuilder(); - diagnoseClusterOperationSettings = settings.diagnoseClusterOperationSettings.toBuilder(); - getClusterSettings = settings.getClusterSettings.toBuilder(); - listClustersSettings = settings.listClustersSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createClusterSettings, - updateClusterSettings, - deleteClusterSettings, - diagnoseClusterSettings, - getClusterSettings, - listClustersSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -643,6 +632,18 @@ public UnaryCallSettings.Builder deleteClusterS return deleteClusterOperationSettings; } + /** Returns the builder for the settings used for calls to getCluster. */ + public UnaryCallSettings.Builder getClusterSettings() { + return getClusterSettings; + } + + /** Returns the builder for the settings used for calls to listClusters. */ + public PagedCallSettings.Builder< + ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> + listClustersSettings() { + return listClustersSettings; + } + /** Returns the builder for the settings used for calls to diagnoseCluster. */ public UnaryCallSettings.Builder diagnoseClusterSettings() { return diagnoseClusterSettings; @@ -656,18 +657,6 @@ public UnaryCallSettings.Builder diagnoseClus return diagnoseClusterOperationSettings; } - /** Returns the builder for the settings used for calls to getCluster. */ - public UnaryCallSettings.Builder getClusterSettings() { - return getClusterSettings; - } - - /** Returns the builder for the settings used for calls to listClusters. */ - public PagedCallSettings.Builder< - ListClustersRequest, ListClustersResponse, ListClustersPagedResponse> - listClustersSettings() { - return listClustersSettings; - } - @Override public ClusterControllerStubSettings build() throws IOException { return new ClusterControllerStubSettings(this); diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcAutoscalingPolicyServiceCallableFactory.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcAutoscalingPolicyServiceCallableFactory.java index a49d57fe..01934f14 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcAutoscalingPolicyServiceCallableFactory.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcAutoscalingPolicyServiceCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
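The `NEXT_MAJOR_VER: remove 'throws Exception'` note refers to `applyToAllUnaryMethods`, which walks `unaryMethodSettingsBuilders` (now ordered create, update, delete, get, list, diagnose, matching the reordered getters below). Illustrative caller-side use; the 30-second timeout is my own placeholder:

```java
import com.google.api.core.ApiFunction;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.dataproc.v1beta2.stub.ClusterControllerStubSettings;
import org.threeten.bp.Duration;

public class UniformTimeoutSketch {
  public static ClusterControllerStubSettings.Builder apply() throws Exception {
    ClusterControllerStubSettings.Builder builder = ClusterControllerStubSettings.newBuilder();
    // One pass over all six unary methods: disable retries, use a flat 30s deadline.
    builder.applyToAllUnaryMethods(
        new ApiFunction<UnaryCallSettings.Builder<?, ?>, Void>() {
          @Override
          public Void apply(UnaryCallSettings.Builder<?, ?> settings) {
            settings.setSimpleTimeoutNoRetries(Duration.ofSeconds(30L));
            return null;
          }
        });
    return builder;
  }
}
```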
*/ + package com.google.cloud.dataproc.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for Cloud Dataproc API. + * gRPC callable factory implementation for the AutoscalingPolicyService service API. * *
<p>
This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcAutoscalingPolicyServiceCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcAutoscalingPolicyServiceStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcAutoscalingPolicyServiceStub.java index 789dabd5..ca60a8ac 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcAutoscalingPolicyServiceStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcAutoscalingPolicyServiceStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
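Each `Grpc*CallableFactory` in this patch is a thin delegator to gax's `GrpcCallableFactory`; the parameter renames (`pagedCallSettings` to `callSettings`, and so on) are cosmetic. Because the class stays public and non-final, it can be wrapped. The subclass below is hypothetical (the name and the logging are mine, not from the patch) and could be handed to the stub's `create(ClientContext, GrpcStubCallableFactory)` overload:

```java
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dataproc.v1beta2.stub.GrpcAutoscalingPolicyServiceCallableFactory;

// Hypothetical wrapper: observe every unary callable the stub creates.
public class LoggingCallableFactory extends GrpcAutoscalingPolicyServiceCallableFactory {
  @Override
  public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createUnaryCallable(
      GrpcCallSettings<RequestT, ResponseT> grpcCallSettings,
      UnaryCallSettings<RequestT, ResponseT> callSettings,
      ClientContext clientContext) {
    System.out.println(
        "wiring " + grpcCallSettings.getMethodDescriptor().getFullMethodName());
    return super.createUnaryCallable(grpcCallSettings, callSettings, clientContext);
  }
}
```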
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -33,6 +33,7 @@ import com.google.cloud.dataproc.v1beta2.ListAutoscalingPoliciesResponse; import com.google.cloud.dataproc.v1beta2.UpdateAutoscalingPolicyRequest; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; @@ -41,16 +42,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for Cloud Dataproc API. + * gRPC stub implementation for the AutoscalingPolicyService service API. * *
<p>
This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceStub { - private static final MethodDescriptor createAutoscalingPolicyMethodDescriptor = MethodDescriptor.newBuilder() @@ -61,6 +60,7 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt ProtoUtils.marshaller(CreateAutoscalingPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AutoscalingPolicy.getDefaultInstance())) .build(); + private static final MethodDescriptor updateAutoscalingPolicyMethodDescriptor = MethodDescriptor.newBuilder() @@ -71,6 +71,7 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt ProtoUtils.marshaller(UpdateAutoscalingPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AutoscalingPolicy.getDefaultInstance())) .build(); + private static final MethodDescriptor getAutoscalingPolicyMethodDescriptor = MethodDescriptor.newBuilder() @@ -81,6 +82,7 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt ProtoUtils.marshaller(GetAutoscalingPolicyRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AutoscalingPolicy.getDefaultInstance())) .build(); + private static final MethodDescriptor< ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse> listAutoscalingPoliciesMethodDescriptor = @@ -94,6 +96,7 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt .setResponseMarshaller( ProtoUtils.marshaller(ListAutoscalingPoliciesResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor deleteAutoscalingPolicyMethodDescriptor = MethodDescriptor.newBuilder() @@ -105,8 +108,6 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final UnaryCallable createAutoscalingPolicyCallable; private final UnaryCallable @@ -120,6 +121,8 @@ public class GrpcAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceSt private final UnaryCallable deleteAutoscalingPolicyCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcAutoscalingPolicyServiceStub create( @@ -161,6 +164,7 @@ protected GrpcAutoscalingPolicyServiceStub( GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createAutoscalingPolicyTransportSettings = @@ -265,7 +269,12 @@ public Map extract(DeleteAutoscalingPolicyRequest request) { settings.deleteAutoscalingPolicySettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } public UnaryCallable @@ -283,16 +292,16 @@ public Map extract(DeleteAutoscalingPolicyRequest request) { return getAutoscalingPolicyCallable; } - public 
UnaryCallable - listAutoscalingPoliciesPagedCallable() { - return listAutoscalingPoliciesPagedCallable; - } - public UnaryCallable listAutoscalingPoliciesCallable() { return listAutoscalingPoliciesCallable; } + public UnaryCallable + listAutoscalingPoliciesPagedCallable() { + return listAutoscalingPoliciesPagedCallable; + } + public UnaryCallable deleteAutoscalingPolicyCallable() { return deleteAutoscalingPolicyCallable; } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcClusterControllerCallableFactory.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcClusterControllerCallableFactory.java index 0ed5f4b0..e4fdf302 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcClusterControllerCallableFactory.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcClusterControllerCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for Cloud Dataproc API. + * gRPC callable factory implementation for the ClusterController service API. * *
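The swap of `listAutoscalingPoliciesPagedCallable()` and `listAutoscalingPoliciesCallable()` above is ordering only, but it is a good spot to spell out the difference between the two surfaces. A short usage sketch; the parent value is a placeholder:

```java
import com.google.cloud.dataproc.v1beta2.AutoscalingPolicy;
import com.google.cloud.dataproc.v1beta2.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse;
import com.google.cloud.dataproc.v1beta2.ListAutoscalingPoliciesRequest;
import com.google.cloud.dataproc.v1beta2.ListAutoscalingPoliciesResponse;
import com.google.cloud.dataproc.v1beta2.stub.AutoscalingPolicyServiceStub;

public class PagingSketch {
  static void listAll(AutoscalingPolicyServiceStub stub) {
    ListAutoscalingPoliciesRequest request =
        ListAutoscalingPoliciesRequest.newBuilder()
            .setParent("projects/my-project/regions/us-central1") // placeholder
            .build();

    // Raw callable: exactly one page; the caller handles nextPageToken itself.
    ListAutoscalingPoliciesResponse onePage =
        stub.listAutoscalingPoliciesCallable().call(request);
    System.out.println(onePage.getPoliciesCount() + " policies in first page");

    // Paged callable: iterateAll() fetches subsequent pages lazily.
    ListAutoscalingPoliciesPagedResponse paged =
        stub.listAutoscalingPoliciesPagedCallable().call(request);
    for (AutoscalingPolicy policy : paged.iterateAll()) {
      System.out.println(policy.getId());
    }
  }
}
```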
<p>
This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcClusterControllerCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcClusterControllerStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcClusterControllerStub.java index 402eb8f6..4e6f5b17 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcClusterControllerStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcClusterControllerStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,17 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.ClusterControllerClient.ListClustersPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dataproc.v1beta2.Cluster; import com.google.cloud.dataproc.v1beta2.ClusterOperationMetadata; @@ -34,25 +35,25 @@ import com.google.cloud.dataproc.v1beta2.ListClustersRequest; import com.google.cloud.dataproc.v1beta2.ListClustersResponse; import com.google.cloud.dataproc.v1beta2.UpdateClusterRequest; +import com.google.common.collect.ImmutableMap; import com.google.longrunning.Operation; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; +import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for Cloud Dataproc API. + * gRPC stub implementation for the ClusterController service API. * *
<p>
This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcClusterControllerStub extends ClusterControllerStub { - private static final MethodDescriptor createClusterMethodDescriptor = MethodDescriptor.newBuilder() @@ -62,6 +63,7 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { ProtoUtils.marshaller(CreateClusterRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor updateClusterMethodDescriptor = MethodDescriptor.newBuilder() @@ -71,6 +73,7 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { ProtoUtils.marshaller(UpdateClusterRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor deleteClusterMethodDescriptor = MethodDescriptor.newBuilder() @@ -80,15 +83,7 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { ProtoUtils.marshaller(DeleteClusterRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); - private static final MethodDescriptor - diagnoseClusterMethodDescriptor = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName("google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster") - .setRequestMarshaller( - ProtoUtils.marshaller(DiagnoseClusterRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) - .build(); + private static final MethodDescriptor getClusterMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -96,6 +91,7 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(GetClusterRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Cluster.getDefaultInstance())) .build(); + private static final MethodDescriptor listClustersMethodDescriptor = MethodDescriptor.newBuilder() @@ -106,8 +102,15 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { ProtoUtils.marshaller(ListClustersResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; + private static final MethodDescriptor + diagnoseClusterMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName("google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster") + .setRequestMarshaller( + ProtoUtils.marshaller(DiagnoseClusterRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) + .build(); private final UnaryCallable createClusterCallable; private final OperationCallable @@ -118,14 +121,16 @@ public class GrpcClusterControllerStub extends ClusterControllerStub { private final UnaryCallable deleteClusterCallable; private final OperationCallable deleteClusterOperationCallable; - private final UnaryCallable diagnoseClusterCallable; - private final OperationCallable - diagnoseClusterOperationCallable; private final UnaryCallable getClusterCallable; private final UnaryCallable listClustersCallable; private final UnaryCallable listClustersPagedCallable; + 
private final UnaryCallable diagnoseClusterCallable; + private final OperationCallable + diagnoseClusterOperationCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcClusterControllerStub create(ClusterControllerStubSettings settings) @@ -171,26 +176,90 @@ protected GrpcClusterControllerStub( GrpcCallSettings createClusterTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(createClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(CreateClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings updateClusterTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(updateClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(UpdateClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("cluster_name", String.valueOf(request.getClusterName())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings deleteClusterTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(deleteClusterMethodDescriptor) - .build(); - GrpcCallSettings diagnoseClusterTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(diagnoseClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(DeleteClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("cluster_name", String.valueOf(request.getClusterName())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings getClusterTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(getClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(GetClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("cluster_name", String.valueOf(request.getClusterName())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings listClustersTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(listClustersMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(ListClustersRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) + .build(); + GrpcCallSettings diagnoseClusterTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(diagnoseClusterMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(DiagnoseClusterRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + 
params.put("cluster_name", String.valueOf(request.getClusterName())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); this.createClusterCallable = @@ -201,7 +270,7 @@ protected GrpcClusterControllerStub( createClusterTransportSettings, settings.createClusterOperationSettings(), clientContext, - this.operationsStub); + operationsStub); this.updateClusterCallable = callableFactory.createUnaryCallable( updateClusterTransportSettings, settings.updateClusterSettings(), clientContext); @@ -210,7 +279,7 @@ protected GrpcClusterControllerStub( updateClusterTransportSettings, settings.updateClusterOperationSettings(), clientContext, - this.operationsStub); + operationsStub); this.deleteClusterCallable = callableFactory.createUnaryCallable( deleteClusterTransportSettings, settings.deleteClusterSettings(), clientContext); @@ -219,16 +288,7 @@ protected GrpcClusterControllerStub( deleteClusterTransportSettings, settings.deleteClusterOperationSettings(), clientContext, - this.operationsStub); - this.diagnoseClusterCallable = - callableFactory.createUnaryCallable( - diagnoseClusterTransportSettings, settings.diagnoseClusterSettings(), clientContext); - this.diagnoseClusterOperationCallable = - callableFactory.createOperationCallable( - diagnoseClusterTransportSettings, - settings.diagnoseClusterOperationSettings(), - clientContext, - this.operationsStub); + operationsStub); this.getClusterCallable = callableFactory.createUnaryCallable( getClusterTransportSettings, settings.getClusterSettings(), clientContext); @@ -238,65 +298,70 @@ protected GrpcClusterControllerStub( this.listClustersPagedCallable = callableFactory.createPagedCallable( listClustersTransportSettings, settings.listClustersSettings(), clientContext); + this.diagnoseClusterCallable = + callableFactory.createUnaryCallable( + diagnoseClusterTransportSettings, settings.diagnoseClusterSettings(), clientContext); + this.diagnoseClusterOperationCallable = + callableFactory.createOperationCallable( + diagnoseClusterTransportSettings, + settings.diagnoseClusterOperationSettings(), + clientContext, + operationsStub); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public GrpcOperationsStub getOperationsStub() { return operationsStub; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public UnaryCallable createClusterCallable() { + return createClusterCallable; + } + public OperationCallable createClusterOperationCallable() { return createClusterOperationCallable; } - public UnaryCallable createClusterCallable() { - return createClusterCallable; + public UnaryCallable updateClusterCallable() { + return updateClusterCallable; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable updateClusterOperationCallable() { return updateClusterOperationCallable; } - public UnaryCallable updateClusterCallable() { - return updateClusterCallable; + public UnaryCallable deleteClusterCallable() { + return deleteClusterCallable; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable 
deleteClusterOperationCallable() { return deleteClusterOperationCallable; } - public UnaryCallable deleteClusterCallable() { - return deleteClusterCallable; - } - - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallable - diagnoseClusterOperationCallable() { - return diagnoseClusterOperationCallable; - } - - public UnaryCallable diagnoseClusterCallable() { - return diagnoseClusterCallable; - } - public UnaryCallable getClusterCallable() { return getClusterCallable; } + public UnaryCallable listClustersCallable() { + return listClustersCallable; + } + public UnaryCallable listClustersPagedCallable() { return listClustersPagedCallable; } - public UnaryCallable listClustersCallable() { - return listClustersCallable; + public UnaryCallable diagnoseClusterCallable() { + return diagnoseClusterCallable; + } + + public OperationCallable + diagnoseClusterOperationCallable() { + return diagnoseClusterOperationCallable; } @Override diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcJobControllerCallableFactory.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcJobControllerCallableFactory.java index 041e766c..c2f5415a 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcJobControllerCallableFactory.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcJobControllerCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for Cloud Dataproc API. + * gRPC callable factory implementation for the JobController service API. * *
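The functional heart of these stub changes is the `setParamsExtractor(...)` wiring: for every RPC, the extractor pulls the routing fields (`project_id`, `region`, plus `cluster_name` for cluster-scoped calls) out of the request, and gax sends the pairs to the service as the `x-goog-request-params` header (`key=value` pairs joined with `&`). A self-contained sketch of the same pattern; the class and method names are mine, and the descriptor is assumed to be built as in the stub above:

```java
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.rpc.RequestParamsExtractor;
import com.google.cloud.dataproc.v1beta2.Cluster;
import com.google.cloud.dataproc.v1beta2.GetClusterRequest;
import com.google.common.collect.ImmutableMap;
import io.grpc.MethodDescriptor;
import java.util.Map;

public class RoutingHeaderSketch {
  // Mirrors the generated getCluster wiring: the extracted pairs become the
  // x-goog-request-params header, which the Dataproc frontend uses for routing.
  static GrpcCallSettings<GetClusterRequest, Cluster> withRoutingParams(
      MethodDescriptor<GetClusterRequest, Cluster> getClusterMethodDescriptor) {
    return GrpcCallSettings.<GetClusterRequest, Cluster>newBuilder()
        .setMethodDescriptor(getClusterMethodDescriptor)
        .setParamsExtractor(
            new RequestParamsExtractor<GetClusterRequest>() {
              @Override
              public Map<String, String> extract(GetClusterRequest request) {
                return ImmutableMap.of(
                    "project_id", request.getProjectId(),
                    "region", request.getRegion(),
                    "cluster_name", request.getClusterName());
              }
            })
        .build();
  }
}
```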
<p>
This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcJobControllerCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcJobControllerStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcJobControllerStub.java index cf798a51..832b083f 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcJobControllerStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcJobControllerStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,17 +13,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.JobControllerClient.ListJobsPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; +import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dataproc.v1beta2.CancelJobRequest; import com.google.cloud.dataproc.v1beta2.DeleteJobRequest; @@ -34,25 +35,25 @@ import com.google.cloud.dataproc.v1beta2.ListJobsResponse; import com.google.cloud.dataproc.v1beta2.SubmitJobRequest; import com.google.cloud.dataproc.v1beta2.UpdateJobRequest; +import com.google.common.collect.ImmutableMap; import com.google.longrunning.Operation; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; +import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for Cloud Dataproc API. + * gRPC stub implementation for the JobController service API. * *
<p>
This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcJobControllerStub extends JobControllerStub { - private static final MethodDescriptor submitJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -60,6 +61,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(SubmitJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance())) .build(); + private static final MethodDescriptor submitJobAsOperationMethodDescriptor = MethodDescriptor.newBuilder() @@ -68,6 +70,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(SubmitJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor getJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -75,6 +78,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(GetJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance())) .build(); + private static final MethodDescriptor listJobsMethodDescriptor = MethodDescriptor.newBuilder() @@ -83,6 +87,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(ListJobsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ListJobsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor updateJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -90,6 +95,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(UpdateJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance())) .build(); + private static final MethodDescriptor cancelJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -97,6 +103,7 @@ public class GrpcJobControllerStub extends JobControllerStub { .setRequestMarshaller(ProtoUtils.marshaller(CancelJobRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Job.getDefaultInstance())) .build(); + private static final MethodDescriptor deleteJobMethodDescriptor = MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) @@ -105,9 +112,6 @@ public class GrpcJobControllerStub extends JobControllerStub { .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; - private final UnaryCallable submitJobCallable; private final UnaryCallable submitJobAsOperationCallable; private final OperationCallable @@ -119,6 +123,8 @@ public class GrpcJobControllerStub extends JobControllerStub { private final UnaryCallable cancelJobCallable; private final UnaryCallable deleteJobCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcJobControllerStub create(JobControllerStubSettings settings) @@ 
-162,30 +168,104 @@ protected GrpcJobControllerStub( GrpcCallSettings submitJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(submitJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(SubmitJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings submitJobAsOperationTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(submitJobAsOperationMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(SubmitJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings getJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(getJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(GetJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("job_id", String.valueOf(request.getJobId())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings listJobsTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(listJobsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(ListJobsRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings updateJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(updateJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(UpdateJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("job_id", String.valueOf(request.getJobId())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings cancelJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(cancelJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(CancelJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("job_id", String.valueOf(request.getJobId())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + return params.build(); + } + }) .build(); GrpcCallSettings deleteJobTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(deleteJobMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(DeleteJobRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("job_id", String.valueOf(request.getJobId())); + params.put("project_id", String.valueOf(request.getProjectId())); + params.put("region", String.valueOf(request.getRegion())); + 
return params.build(); + } + }) .build(); this.submitJobCallable = @@ -201,7 +281,7 @@ protected GrpcJobControllerStub( submitJobAsOperationTransportSettings, settings.submitJobAsOperationOperationSettings(), clientContext, - this.operationsStub); + operationsStub); this.getJobCallable = callableFactory.createUnaryCallable( getJobTransportSettings, settings.getJobSettings(), clientContext); @@ -221,10 +301,10 @@ protected GrpcJobControllerStub( callableFactory.createUnaryCallable( deleteJobTransportSettings, settings.deleteJobSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public GrpcOperationsStub getOperationsStub() { return operationsStub; } @@ -233,28 +313,27 @@ public UnaryCallable submitJobCallable() { return submitJobCallable; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public UnaryCallable submitJobAsOperationCallable() { + return submitJobAsOperationCallable; + } + public OperationCallable submitJobAsOperationOperationCallable() { return submitJobAsOperationOperationCallable; } - public UnaryCallable submitJobAsOperationCallable() { - return submitJobAsOperationCallable; - } - public UnaryCallable getJobCallable() { return getJobCallable; } - public UnaryCallable listJobsPagedCallable() { - return listJobsPagedCallable; - } - public UnaryCallable listJobsCallable() { return listJobsCallable; } + public UnaryCallable listJobsPagedCallable() { + return listJobsPagedCallable; + } + public UnaryCallable updateJobCallable() { return updateJobCallable; } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceCallableFactory.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceCallableFactory.java index c35eae9e..1fa7f756 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceCallableFactory.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
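Given the `submitJobAsOperation` wiring above, the operation callable is normally the more convenient surface than the raw `Operation` callable, since it resolves to the typed `Job` response and `JobMetadata`. A usage sketch under that assumption; request construction is left to the caller:

```java
import com.google.api.gax.longrunning.OperationFuture;
import com.google.cloud.dataproc.v1beta2.Job;
import com.google.cloud.dataproc.v1beta2.JobMetadata;
import com.google.cloud.dataproc.v1beta2.SubmitJobRequest;
import com.google.cloud.dataproc.v1beta2.stub.JobControllerStub;

public class SubmitJobLroSketch {
  static Job submitAndWait(JobControllerStub stub, SubmitJobRequest request) throws Exception {
    // futureCall starts the RPC; the returned future then polls the LRO using
    // the schedule from submitJobAsOperationOperationSettings().
    OperationFuture<Job, JobMetadata> future =
        stub.submitJobAsOperationOperationCallable().futureCall(request);
    System.out.println("operation: " + future.getName());
    return future.get(); // blocks until the job resource is returned
  }
}
```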
/** - * gRPC callable factory implementation for Cloud Dataproc API. + * gRPC callable factory implementation for the WorkflowTemplateService service API. * *
<p>
This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcWorkflowTemplateServiceCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceStub.java index 0d14c4b8..d8700040 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/GrpcWorkflowTemplateServiceStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
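Every stub in this patch now exposes `getOperationsStub()`, giving direct access to the `google.longrunning` surface the operation callables poll through, for example to re-fetch an operation by name after losing the original future. A minimal sketch:

```java
import com.google.longrunning.GetOperationRequest;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;

public class OperationLookupSketch {
  static Operation refresh(GrpcOperationsStub operationsStub, String operationName) {
    GetOperationRequest request =
        GetOperationRequest.newBuilder().setName(operationName).build();
    // Reuses the same channel and settings the stub's operation callables poll with.
    return operationsStub.getOperationCallable().call(request);
  }
}
```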
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -47,15 +47,35 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for Cloud Dataproc API. + * gRPC stub implementation for the WorkflowTemplateService service API. * *
<p>
This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub { + private static final MethodDescriptor + createWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1beta2.WorkflowTemplateService/CreateWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(CreateWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) + .build(); + + private static final MethodDescriptor + getWorkflowTemplateMethodDescriptor = + MethodDescriptor.newBuilder() + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName( + "google.cloud.dataproc.v1beta2.WorkflowTemplateService/GetWorkflowTemplate") + .setRequestMarshaller( + ProtoUtils.marshaller(GetWorkflowTemplateRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) + .build(); private static final MethodDescriptor instantiateWorkflowTemplateMethodDescriptor = @@ -67,6 +87,7 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub ProtoUtils.marshaller(InstantiateWorkflowTemplateRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); + private static final MethodDescriptor instantiateInlineWorkflowTemplateMethodDescriptor = MethodDescriptor.newBuilder() @@ -78,26 +99,7 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub InstantiateInlineWorkflowTemplateRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .build(); - private static final MethodDescriptor - createWorkflowTemplateMethodDescriptor = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName( - "google.cloud.dataproc.v1beta2.WorkflowTemplateService/CreateWorkflowTemplate") - .setRequestMarshaller( - ProtoUtils.marshaller(CreateWorkflowTemplateRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) - .build(); - private static final MethodDescriptor - getWorkflowTemplateMethodDescriptor = - MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName( - "google.cloud.dataproc.v1beta2.WorkflowTemplateService/GetWorkflowTemplate") - .setRequestMarshaller( - ProtoUtils.marshaller(GetWorkflowTemplateRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) - .build(); + private static final MethodDescriptor updateWorkflowTemplateMethodDescriptor = MethodDescriptor.newBuilder() @@ -108,6 +110,7 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub ProtoUtils.marshaller(UpdateWorkflowTemplateRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(WorkflowTemplate.getDefaultInstance())) .build(); + private static final MethodDescriptor listWorkflowTemplatesMethodDescriptor = MethodDescriptor.newBuilder() @@ -119,6 +122,7 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub .setResponseMarshaller( 
ProtoUtils.marshaller(ListWorkflowTemplatesResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor deleteWorkflowTemplateMethodDescriptor = MethodDescriptor.newBuilder() @@ -130,9 +134,10 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; - + private final UnaryCallable + createWorkflowTemplateCallable; + private final UnaryCallable + getWorkflowTemplateCallable; private final UnaryCallable instantiateWorkflowTemplateCallable; private final OperationCallable @@ -141,10 +146,6 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub instantiateInlineWorkflowTemplateCallable; private final OperationCallable instantiateInlineWorkflowTemplateOperationCallable; - private final UnaryCallable - createWorkflowTemplateCallable; - private final UnaryCallable - getWorkflowTemplateCallable; private final UnaryCallable updateWorkflowTemplateCallable; private final UnaryCallable @@ -153,6 +154,8 @@ public class GrpcWorkflowTemplateServiceStub extends WorkflowTemplateServiceStub listWorkflowTemplatesPagedCallable; private final UnaryCallable deleteWorkflowTemplateCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcWorkflowTemplateServiceStub create( @@ -196,6 +199,34 @@ protected GrpcWorkflowTemplateServiceStub( this.callableFactory = callableFactory; this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); + GrpcCallSettings + createWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createWorkflowTemplateMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(CreateWorkflowTemplateRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + } + }) + .build(); + GrpcCallSettings + getWorkflowTemplateTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getWorkflowTemplateMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(GetWorkflowTemplateRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + } + }) + .build(); GrpcCallSettings instantiateWorkflowTemplateTransportSettings = GrpcCallSettings.newBuilder() @@ -226,34 +257,6 @@ public Map extract( } }) .build(); - GrpcCallSettings - createWorkflowTemplateTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(createWorkflowTemplateMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(CreateWorkflowTemplateRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("parent", String.valueOf(request.getParent())); - return params.build(); - } - }) - .build(); - GrpcCallSettings - getWorkflowTemplateTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(getWorkflowTemplateMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(GetWorkflowTemplateRequest request) { - ImmutableMap.Builder 
params = ImmutableMap.builder(); - params.put("name", String.valueOf(request.getName())); - return params.build(); - } - }) - .build(); GrpcCallSettings updateWorkflowTemplateTransportSettings = GrpcCallSettings.newBuilder() @@ -298,6 +301,16 @@ public Map extract(DeleteWorkflowTemplateRequest request) { }) .build(); + this.createWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + createWorkflowTemplateTransportSettings, + settings.createWorkflowTemplateSettings(), + clientContext); + this.getWorkflowTemplateCallable = + callableFactory.createUnaryCallable( + getWorkflowTemplateTransportSettings, + settings.getWorkflowTemplateSettings(), + clientContext); this.instantiateWorkflowTemplateCallable = callableFactory.createUnaryCallable( instantiateWorkflowTemplateTransportSettings, @@ -308,7 +321,7 @@ public Map extract(DeleteWorkflowTemplateRequest request) { instantiateWorkflowTemplateTransportSettings, settings.instantiateWorkflowTemplateOperationSettings(), clientContext, - this.operationsStub); + operationsStub); this.instantiateInlineWorkflowTemplateCallable = callableFactory.createUnaryCallable( instantiateInlineWorkflowTemplateTransportSettings, @@ -319,17 +332,7 @@ public Map extract(DeleteWorkflowTemplateRequest request) { instantiateInlineWorkflowTemplateTransportSettings, settings.instantiateInlineWorkflowTemplateOperationSettings(), clientContext, - this.operationsStub); - this.createWorkflowTemplateCallable = - callableFactory.createUnaryCallable( - createWorkflowTemplateTransportSettings, - settings.createWorkflowTemplateSettings(), - clientContext); - this.getWorkflowTemplateCallable = - callableFactory.createUnaryCallable( - getWorkflowTemplateTransportSettings, - settings.getWorkflowTemplateSettings(), - clientContext); + operationsStub); this.updateWorkflowTemplateCallable = callableFactory.createUnaryCallable( updateWorkflowTemplateTransportSettings, @@ -351,18 +354,21 @@ public Map extract(DeleteWorkflowTemplateRequest request) { settings.deleteWorkflowTemplateSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public GrpcOperationsStub getOperationsStub() { return operationsStub; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallable - instantiateWorkflowTemplateOperationCallable() { - return instantiateWorkflowTemplateOperationCallable; + public UnaryCallable + createWorkflowTemplateCallable() { + return createWorkflowTemplateCallable; + } + + public UnaryCallable getWorkflowTemplateCallable() { + return getWorkflowTemplateCallable; } public UnaryCallable @@ -370,10 +376,9 @@ public GrpcOperationsStub getOperationsStub() { return instantiateWorkflowTemplateCallable; } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") - public OperationCallable - instantiateInlineWorkflowTemplateOperationCallable() { - return instantiateInlineWorkflowTemplateOperationCallable; + public OperationCallable + instantiateWorkflowTemplateOperationCallable() { + return instantiateWorkflowTemplateOperationCallable; } public UnaryCallable @@ -381,13 +386,9 @@ public GrpcOperationsStub getOperationsStub() { return instantiateInlineWorkflowTemplateCallable; } - public UnaryCallable - 
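Each GrpcCallSettings block above pairs a descriptor with a RequestParamsExtractor so that the resource name ("name" or "parent") travels as a request routing header. A minimal sketch of that wiring for getWorkflowTemplate, with generics restored; the anonymous class mirrors the generated style, though a lambda would work equally well:

import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.rpc.RequestParamsExtractor;
import com.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest;
import com.google.cloud.dataproc.v1beta2.WorkflowTemplate;
import com.google.common.collect.ImmutableMap;
import java.util.Map;

// Sketch: attach a params extractor so "name" is propagated as a routing header.
GrpcCallSettings<GetWorkflowTemplateRequest, WorkflowTemplate> getTransportSettings =
    GrpcCallSettings.<GetWorkflowTemplateRequest, WorkflowTemplate>newBuilder()
        .setMethodDescriptor(getWorkflowTemplateMethodDescriptor)
        .setParamsExtractor(
            new RequestParamsExtractor<GetWorkflowTemplateRequest>() {
              @Override
              public Map<String, String> extract(GetWorkflowTemplateRequest request) {
                ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                params.put("name", String.valueOf(request.getName()));
                return params.build();
              }
            })
        .build();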
createWorkflowTemplateCallable() { - return createWorkflowTemplateCallable; - } - - public UnaryCallable getWorkflowTemplateCallable() { - return getWorkflowTemplateCallable; + public OperationCallable + instantiateInlineWorkflowTemplateOperationCallable() { + return instantiateInlineWorkflowTemplateOperationCallable; } public UnaryCallable @@ -395,16 +396,16 @@ public UnaryCallable getWorkflowTe return updateWorkflowTemplateCallable; } - public UnaryCallable - listWorkflowTemplatesPagedCallable() { - return listWorkflowTemplatesPagedCallable; - } - public UnaryCallable listWorkflowTemplatesCallable() { return listWorkflowTemplatesCallable; } + public UnaryCallable + listWorkflowTemplatesPagedCallable() { + return listWorkflowTemplatesPagedCallable; + } + public UnaryCallable deleteWorkflowTemplateCallable() { return deleteWorkflowTemplateCallable; } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStub.java index 98ecabe5..ce86e52f 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.JobControllerClient.ListJobsPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -35,17 +35,15 @@ import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for Cloud Dataproc API. + * Base stub class for the JobController service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class JobControllerStub implements BackgroundResource { - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationsStub getOperationsStub() { throw new UnsupportedOperationException("Not implemented: getOperationsStub()"); } @@ -54,7 +52,6 @@ public UnaryCallable submitJobCallable() { throw new UnsupportedOperationException("Not implemented: submitJobCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable submitJobAsOperationOperationCallable() { throw new UnsupportedOperationException( diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStubSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStubSettings.java index ed45827a..e6b58847 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStubSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/JobControllerStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.JobControllerClient.ListJobsPagedResponse; @@ -63,7 +64,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link JobControllerStub}. * @@ -80,22 +81,23 @@ * *

* <p>For example, to set the total timeout of submitJob to 30 seconds:
*
- * <pre>
- * <code>
+ * <pre>{@code
  * JobControllerStubSettings.Builder jobControllerSettingsBuilder =
  *     JobControllerStubSettings.newBuilder();
  * jobControllerSettingsBuilder
  *     .submitJobSettings()
  *     .setRetrySettings(
- *         jobControllerSettingsBuilder.submitJobSettings().getRetrySettings().toBuilder()
+ *         jobControllerSettingsBuilder
+ *             .submitJobSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * JobControllerStubSettings jobControllerSettings = jobControllerSettingsBuilder.build();
- * </code>
- * </pre>
+ * }</pre>
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class JobControllerStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -112,18 +114,69 @@ public class JobControllerStubSettings extends StubSettings cancelJobSettings; private final UnaryCallSettings deleteJobSettings; + private static final PagedListDescriptor + LIST_JOBS_PAGE_STR_DESC = + new PagedListDescriptor() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListJobsRequest injectToken(ListJobsRequest payload, String token) { + return ListJobsRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListJobsRequest injectPageSize(ListJobsRequest payload, int pageSize) { + return ListJobsRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListJobsRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListJobsResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListJobsResponse payload) { + return payload.getJobsList() == null + ? ImmutableList.of() + : payload.getJobsList(); + } + }; + + private static final PagedListResponseFactory< + ListJobsRequest, ListJobsResponse, ListJobsPagedResponse> + LIST_JOBS_PAGE_STR_FACT = + new PagedListResponseFactory() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListJobsRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext pageContext = + PageContext.create(callable, LIST_JOBS_PAGE_STR_DESC, request, context); + return ListJobsPagedResponse.createAsync(pageContext, futureResponse); + } + }; + /** Returns the object with the settings used for calls to submitJob. */ public UnaryCallSettings submitJobSettings() { return submitJobSettings; } - /** Returns the object with the settings used for calls to submitJobAsOperation. */ + /** Returns the object with the settings used for calls to submitJobAs. */ public UnaryCallSettings submitJobAsOperationSettings() { return submitJobAsOperationSettings; } /** Returns the object with the settings used for calls to submitJobAsOperation. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings submitJobAsOperationOperationSettings() { return submitJobAsOperationOperationSettings; @@ -161,10 +214,10 @@ public JobControllerStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcJobControllerStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. 
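Callers never touch LIST_JOBS_PAGE_STR_DESC and LIST_JOBS_PAGE_STR_FACT directly; the paged callable they produce is consumed through the client. A hypothetical usage sketch (project and region values are placeholders):

import com.google.cloud.dataproc.v1beta2.Job;
import com.google.cloud.dataproc.v1beta2.JobControllerClient;

// Sketch: iterateAll() transparently re-issues the RPC with the page token
// that LIST_JOBS_PAGE_STR_DESC extracts from each response.
static void printJobIds() throws Exception {
  try (JobControllerClient client = JobControllerClient.create()) {
    for (Job job : client.listJobs("my-project", "us-central1").iterateAll()) {
      System.out.println(job.getReference().getJobId());
    }
  }
}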
*/ @@ -235,62 +288,9 @@ protected JobControllerStubSettings(Builder settingsBuilder) throws IOException deleteJobSettings = settingsBuilder.deleteJobSettings().build(); } - private static final PagedListDescriptor - LIST_JOBS_PAGE_STR_DESC = - new PagedListDescriptor() { - @Override - public String emptyToken() { - return ""; - } - - @Override - public ListJobsRequest injectToken(ListJobsRequest payload, String token) { - return ListJobsRequest.newBuilder(payload).setPageToken(token).build(); - } - - @Override - public ListJobsRequest injectPageSize(ListJobsRequest payload, int pageSize) { - return ListJobsRequest.newBuilder(payload).setPageSize(pageSize).build(); - } - - @Override - public Integer extractPageSize(ListJobsRequest payload) { - return payload.getPageSize(); - } - - @Override - public String extractNextToken(ListJobsResponse payload) { - return payload.getNextPageToken(); - } - - @Override - public Iterable extractResources(ListJobsResponse payload) { - return payload.getJobsList() != null - ? payload.getJobsList() - : ImmutableList.of(); - } - }; - - private static final PagedListResponseFactory< - ListJobsRequest, ListJobsResponse, ListJobsPagedResponse> - LIST_JOBS_PAGE_STR_FACT = - new PagedListResponseFactory() { - @Override - public ApiFuture getFuturePagedResponse( - UnaryCallable callable, - ListJobsRequest request, - ApiCallContext context, - ApiFuture futureResponse) { - PageContext pageContext = - PageContext.create(callable, LIST_JOBS_PAGE_STR_DESC, request, context); - return ListJobsPagedResponse.createAsync(pageContext, futureResponse); - } - }; - /** Builder for JobControllerStubSettings. */ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder submitJobSettings; private final UnaryCallSettings.Builder submitJobAsOperationSettings; @@ -303,37 +303,15 @@ public static class Builder extends StubSettings.Builder updateJobSettings; private final UnaryCallSettings.Builder cancelJobSettings; private final UnaryCallSettings.Builder deleteJobSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder> definitions = ImmutableMap.builder(); - definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); definitions.put( "retry_policy_4_codes", ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_6_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_3_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.INTERNAL, - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "no_retry_1_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( "retry_policy_5_codes", ImmutableSet.copyOf( @@ -341,13 +319,6 @@ public static class Builder extends StubSettings.BuildernewArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.INTERNAL, - StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -356,61 +327,6 @@ public static class Builder extends StubSettings.Builder definitions = ImmutableMap.builder(); RetrySettings settings = 
null; - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_1_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_6_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(300000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(300000L)) - .setTotalTimeout(Duration.ofMillis(300000L)) - .build(); - definitions.put("retry_policy_2_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(300000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(300000L)) - .setTotalTimeout(Duration.ofMillis(300000L)) - .build(); - definitions.put("retry_policy_3_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_7_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.ofMillis(100L)) @@ -421,7 +337,7 @@ public static class Builder extends StubSettings.Builder>of( + submitJobSettings, + submitJobAsOperationSettings, + getJobSettings, + listJobsSettings, + updateJobSettings, + cancelJobSettings, + deleteJobSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .submitJobSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_4_codes")) @@ -527,6 +450,7 @@ private static Builder initDefaults(Builder builder) { .deleteJobSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_4_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_4_params")); + builder .submitJobAsOperationOperationSettings() .setInitialCallSettings( @@ -540,43 +464,19 @@ private static Builder initDefaults(Builder builder) { .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() - 
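All of the retry profiles added or deleted in this block share one shape: exponential backoff between attempts plus per-RPC and total deadlines. A sketch of that shape, using the values of the deleted retry_policy_1_params entry shown above:

import com.google.api.gax.retrying.RetrySettings;
import org.threeten.bp.Duration;

// Sketch: backoff starts at 100 ms, grows 1.3x per attempt up to 60 s between
// tries; each RPC and the overall call are capped at 600 s (retry_policy_1_params).
RetrySettings retryPolicy1 =
    RetrySettings.newBuilder()
        .setInitialRetryDelay(Duration.ofMillis(100L))
        .setRetryDelayMultiplier(1.3)
        .setMaxRetryDelay(Duration.ofMillis(60000L))
        .setInitialRpcTimeout(Duration.ofMillis(600000L))
        .setRpcTimeoutMultiplier(1.0)
        .setMaxRpcTimeout(Duration.ofMillis(600000L))
        .setTotalTimeout(Duration.ofMillis(600000L))
        .build();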
.setInitialRetryDelay(Duration.ofMillis(500L)) + .setInitialRetryDelay(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) - .setMaxRetryDelay(Duration.ofMillis(5000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setMaxRetryDelay(Duration.ofMillis(45000L)) + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(300000L)) .build())); return builder; } - protected Builder(JobControllerStubSettings settings) { - super(settings); - - submitJobSettings = settings.submitJobSettings.toBuilder(); - submitJobAsOperationSettings = settings.submitJobAsOperationSettings.toBuilder(); - submitJobAsOperationOperationSettings = - settings.submitJobAsOperationOperationSettings.toBuilder(); - getJobSettings = settings.getJobSettings.toBuilder(); - listJobsSettings = settings.listJobsSettings.toBuilder(); - updateJobSettings = settings.updateJobSettings.toBuilder(); - cancelJobSettings = settings.cancelJobSettings.toBuilder(); - deleteJobSettings = settings.deleteJobSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - submitJobSettings, - submitJobAsOperationSettings, - getJobSettings, - listJobsSettings, - updateJobSettings, - cancelJobSettings, - deleteJobSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -597,7 +497,7 @@ public UnaryCallSettings.Builder submitJobSettings() { return submitJobSettings; } - /** Returns the builder for the settings used for calls to submitJobAsOperation. */ + /** Returns the builder for the settings used for calls to submitJobAs. */ public UnaryCallSettings.Builder submitJobAsOperationSettings() { return submitJobAsOperationSettings; } diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStub.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStub.java index 16b566c7..5ccf9daf 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStub.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -36,22 +36,28 @@ import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for Cloud Dataproc API. 
+ * Base stub class for the WorkflowTemplateService service API.
 *
 * <p>
This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class WorkflowTemplateServiceStub implements BackgroundResource { - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationsStub getOperationsStub() { throw new UnsupportedOperationException("Not implemented: getOperationsStub()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") + public UnaryCallable + createWorkflowTemplateCallable() { + throw new UnsupportedOperationException("Not implemented: createWorkflowTemplateCallable()"); + } + + public UnaryCallable getWorkflowTemplateCallable() { + throw new UnsupportedOperationException("Not implemented: getWorkflowTemplateCallable()"); + } + public OperationCallable instantiateWorkflowTemplateOperationCallable() { throw new UnsupportedOperationException( @@ -64,7 +70,6 @@ public OperationsStub getOperationsStub() { "Not implemented: instantiateWorkflowTemplateCallable()"); } - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallable instantiateInlineWorkflowTemplateOperationCallable() { throw new UnsupportedOperationException( @@ -77,15 +82,6 @@ public OperationsStub getOperationsStub() { "Not implemented: instantiateInlineWorkflowTemplateCallable()"); } - public UnaryCallable - createWorkflowTemplateCallable() { - throw new UnsupportedOperationException("Not implemented: createWorkflowTemplateCallable()"); - } - - public UnaryCallable getWorkflowTemplateCallable() { - throw new UnsupportedOperationException("Not implemented: getWorkflowTemplateCallable()"); - } - public UnaryCallable updateWorkflowTemplateCallable() { throw new UnsupportedOperationException("Not implemented: updateWorkflowTemplateCallable()"); diff --git a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java index e2d7f86e..0912dd69 100644 --- a/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java +++ b/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2.stub; import static com.google.cloud.dataproc.v1beta2.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; @@ -64,7 +65,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link WorkflowTemplateServiceStub}. * @@ -81,28 +82,34 @@ * *

* <p>For example, to set the total timeout of createWorkflowTemplate to 30 seconds:
*
- * <pre>
- * <code>
+ * <pre>{@code
  * WorkflowTemplateServiceStubSettings.Builder workflowTemplateServiceSettingsBuilder =
  *     WorkflowTemplateServiceStubSettings.newBuilder();
  * workflowTemplateServiceSettingsBuilder
  *     .createWorkflowTemplateSettings()
  *     .setRetrySettings(
- *         workflowTemplateServiceSettingsBuilder.createWorkflowTemplateSettings().getRetrySettings().toBuilder()
+ *         workflowTemplateServiceSettingsBuilder
+ *             .createWorkflowTemplateSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * WorkflowTemplateServiceStubSettings workflowTemplateServiceSettings = workflowTemplateServiceSettingsBuilder.build();
- * </code>
- * </pre>
+ * WorkflowTemplateServiceStubSettings workflowTemplateServiceSettings =
+ *     workflowTemplateServiceSettingsBuilder.build();
+ * }</pre>
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class WorkflowTemplateServiceStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = ImmutableList.builder().add("https://www.googleapis.com/auth/cloud-platform").build(); + private final UnaryCallSettings + createWorkflowTemplateSettings; + private final UnaryCallSettings + getWorkflowTemplateSettings; private final UnaryCallSettings instantiateWorkflowTemplateSettings; private final OperationCallSettings @@ -112,10 +119,6 @@ public class WorkflowTemplateServiceStubSettings private final OperationCallSettings< InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateOperationSettings; - private final UnaryCallSettings - createWorkflowTemplateSettings; - private final UnaryCallSettings - getWorkflowTemplateSettings; private final UnaryCallSettings updateWorkflowTemplateSettings; private final PagedCallSettings< @@ -126,6 +129,83 @@ public class WorkflowTemplateServiceStubSettings private final UnaryCallSettings deleteWorkflowTemplateSettings; + private static final PagedListDescriptor< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> + LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC = + new PagedListDescriptor< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate>() { + @Override + public String emptyToken() { + return ""; + } + + @Override + public ListWorkflowTemplatesRequest injectToken( + ListWorkflowTemplatesRequest payload, String token) { + return ListWorkflowTemplatesRequest.newBuilder(payload).setPageToken(token).build(); + } + + @Override + public ListWorkflowTemplatesRequest injectPageSize( + ListWorkflowTemplatesRequest payload, int pageSize) { + return ListWorkflowTemplatesRequest.newBuilder(payload).setPageSize(pageSize).build(); + } + + @Override + public Integer extractPageSize(ListWorkflowTemplatesRequest payload) { + return payload.getPageSize(); + } + + @Override + public String extractNextToken(ListWorkflowTemplatesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListWorkflowTemplatesResponse payload) { + return payload.getTemplatesList() == null + ? ImmutableList.of() + : payload.getTemplatesList(); + } + }; + + private static final PagedListResponseFactory< + ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse> + LIST_WORKFLOW_TEMPLATES_PAGE_STR_FACT = + new PagedListResponseFactory< + ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ListWorkflowTemplatesPagedResponse>() { + @Override + public ApiFuture getFuturePagedResponse( + UnaryCallable callable, + ListWorkflowTemplatesRequest request, + ApiCallContext context, + ApiFuture futureResponse) { + PageContext< + ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> + pageContext = + PageContext.create( + callable, LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC, request, context); + return ListWorkflowTemplatesPagedResponse.createAsync(pageContext, futureResponse); + } + }; + + /** Returns the object with the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings + createWorkflowTemplateSettings() { + return createWorkflowTemplateSettings; + } + + /** Returns the object with the settings used for calls to getWorkflowTemplate. 
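The page descriptor above is what lets GAX resume a listing: it pulls next_page_token out of one response and injects it into the next request. A minimal sketch of the injectToken step (the helper method name is illustrative):

import com.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest;
import com.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse;

// Sketch: one page boundary, as LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC handles it.
static ListWorkflowTemplatesRequest nextPageRequest(
    ListWorkflowTemplatesRequest previous, ListWorkflowTemplatesResponse response) {
  // Copy the previous request and carry over the server-issued continuation token.
  return ListWorkflowTemplatesRequest.newBuilder(previous)
      .setPageToken(response.getNextPageToken())
      .build();
}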
*/ + public UnaryCallSettings + getWorkflowTemplateSettings() { + return getWorkflowTemplateSettings; + } + /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ public UnaryCallSettings instantiateWorkflowTemplateSettings() { @@ -133,7 +213,6 @@ public class WorkflowTemplateServiceStubSettings } /** Returns the object with the settings used for calls to instantiateWorkflowTemplate. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings instantiateWorkflowTemplateOperationSettings() { return instantiateWorkflowTemplateOperationSettings; @@ -146,24 +225,11 @@ public class WorkflowTemplateServiceStubSettings } /** Returns the object with the settings used for calls to instantiateInlineWorkflowTemplate. */ - @BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public OperationCallSettings instantiateInlineWorkflowTemplateOperationSettings() { return instantiateInlineWorkflowTemplateOperationSettings; } - /** Returns the object with the settings used for calls to createWorkflowTemplate. */ - public UnaryCallSettings - createWorkflowTemplateSettings() { - return createWorkflowTemplateSettings; - } - - /** Returns the object with the settings used for calls to getWorkflowTemplate. */ - public UnaryCallSettings - getWorkflowTemplateSettings() { - return getWorkflowTemplateSettings; - } - /** Returns the object with the settings used for calls to updateWorkflowTemplate. */ public UnaryCallSettings updateWorkflowTemplateSettings() { @@ -190,10 +256,10 @@ public WorkflowTemplateServiceStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcWorkflowTemplateServiceStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. 
*/ @@ -253,6 +319,8 @@ public Builder toBuilder() { protected WorkflowTemplateServiceStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); + createWorkflowTemplateSettings = settingsBuilder.createWorkflowTemplateSettings().build(); + getWorkflowTemplateSettings = settingsBuilder.getWorkflowTemplateSettings().build(); instantiateWorkflowTemplateSettings = settingsBuilder.instantiateWorkflowTemplateSettings().build(); instantiateWorkflowTemplateOperationSettings = @@ -261,83 +329,19 @@ protected WorkflowTemplateServiceStubSettings(Builder settingsBuilder) throws IO settingsBuilder.instantiateInlineWorkflowTemplateSettings().build(); instantiateInlineWorkflowTemplateOperationSettings = settingsBuilder.instantiateInlineWorkflowTemplateOperationSettings().build(); - createWorkflowTemplateSettings = settingsBuilder.createWorkflowTemplateSettings().build(); - getWorkflowTemplateSettings = settingsBuilder.getWorkflowTemplateSettings().build(); updateWorkflowTemplateSettings = settingsBuilder.updateWorkflowTemplateSettings().build(); listWorkflowTemplatesSettings = settingsBuilder.listWorkflowTemplatesSettings().build(); deleteWorkflowTemplateSettings = settingsBuilder.deleteWorkflowTemplateSettings().build(); } - private static final PagedListDescriptor< - ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> - LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC = - new PagedListDescriptor< - ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate>() { - @Override - public String emptyToken() { - return ""; - } - - @Override - public ListWorkflowTemplatesRequest injectToken( - ListWorkflowTemplatesRequest payload, String token) { - return ListWorkflowTemplatesRequest.newBuilder(payload).setPageToken(token).build(); - } - - @Override - public ListWorkflowTemplatesRequest injectPageSize( - ListWorkflowTemplatesRequest payload, int pageSize) { - return ListWorkflowTemplatesRequest.newBuilder(payload).setPageSize(pageSize).build(); - } - - @Override - public Integer extractPageSize(ListWorkflowTemplatesRequest payload) { - return payload.getPageSize(); - } - - @Override - public String extractNextToken(ListWorkflowTemplatesResponse payload) { - return payload.getNextPageToken(); - } - - @Override - public Iterable extractResources( - ListWorkflowTemplatesResponse payload) { - return payload.getTemplatesList() != null - ? payload.getTemplatesList() - : ImmutableList.of(); - } - }; - - private static final PagedListResponseFactory< - ListWorkflowTemplatesRequest, - ListWorkflowTemplatesResponse, - ListWorkflowTemplatesPagedResponse> - LIST_WORKFLOW_TEMPLATES_PAGE_STR_FACT = - new PagedListResponseFactory< - ListWorkflowTemplatesRequest, - ListWorkflowTemplatesResponse, - ListWorkflowTemplatesPagedResponse>() { - @Override - public ApiFuture getFuturePagedResponse( - UnaryCallable callable, - ListWorkflowTemplatesRequest request, - ApiCallContext context, - ApiFuture futureResponse) { - PageContext< - ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse, WorkflowTemplate> - pageContext = - PageContext.create( - callable, LIST_WORKFLOW_TEMPLATES_PAGE_STR_DESC, request, context); - return ListWorkflowTemplatesPagedResponse.createAsync(pageContext, futureResponse); - } - }; - /** Builder for WorkflowTemplateServiceStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - + private final UnaryCallSettings.Builder + createWorkflowTemplateSettings; + private final UnaryCallSettings.Builder + getWorkflowTemplateSettings; private final UnaryCallSettings.Builder instantiateWorkflowTemplateSettings; private final OperationCallSettings.Builder< @@ -348,10 +352,6 @@ public static class Builder private final OperationCallSettings.Builder< InstantiateInlineWorkflowTemplateRequest, Empty, WorkflowMetadata> instantiateInlineWorkflowTemplateOperationSettings; - private final UnaryCallSettings.Builder - createWorkflowTemplateSettings; - private final UnaryCallSettings.Builder - getWorkflowTemplateSettings; private final UnaryCallSettings.Builder updateWorkflowTemplateSettings; private final PagedCallSettings.Builder< @@ -361,7 +361,6 @@ public static class Builder listWorkflowTemplatesSettings; private final UnaryCallSettings.Builder deleteWorkflowTemplateSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -371,7 +370,6 @@ public static class Builder definitions.put( "retry_policy_6_codes", ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( "retry_policy_7_codes", ImmutableSet.copyOf( @@ -409,80 +407,98 @@ public static class Builder .setTotalTimeout(Duration.ofMillis(600000L)) .build(); definitions.put("retry_policy_7_params", settings); - settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build(); - definitions.put("no_retry_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); + createWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); + getWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); instantiateWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - instantiateWorkflowTemplateOperationSettings = OperationCallSettings.newBuilder(); - instantiateInlineWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - instantiateInlineWorkflowTemplateOperationSettings = OperationCallSettings.newBuilder(); - - createWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - - getWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - updateWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); - listWorkflowTemplatesSettings = PagedCallSettings.newBuilder(LIST_WORKFLOW_TEMPLATES_PAGE_STR_FACT); - deleteWorkflowTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.>of( + createWorkflowTemplateSettings, + getWorkflowTemplateSettings, instantiateWorkflowTemplateSettings, instantiateInlineWorkflowTemplateSettings, + updateWorkflowTemplateSettings, + listWorkflowTemplatesSettings, + deleteWorkflowTemplateSettings); + initDefaults(this); + } + + protected Builder(WorkflowTemplateServiceStubSettings settings) { + super(settings); + + createWorkflowTemplateSettings = settings.createWorkflowTemplateSettings.toBuilder(); + getWorkflowTemplateSettings = settings.getWorkflowTemplateSettings.toBuilder(); + instantiateWorkflowTemplateSettings = + settings.instantiateWorkflowTemplateSettings.toBuilder(); + 
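The copy constructor above is what backs toBuilder(), so settings can be rebuilt and tweaked without losing per-method configuration. A hypothetical round-trip, inside a method that may throw IOException (the endpoint value is a placeholder):

// Sketch: rebuild the frozen settings, override one property, freeze again.
WorkflowTemplateServiceStubSettings original =
    WorkflowTemplateServiceStubSettings.newBuilder().build();
WorkflowTemplateServiceStubSettings modified =
    original.toBuilder().setEndpoint("dataproc.googleapis.com:443").build();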
instantiateWorkflowTemplateOperationSettings = + settings.instantiateWorkflowTemplateOperationSettings.toBuilder(); + instantiateInlineWorkflowTemplateSettings = + settings.instantiateInlineWorkflowTemplateSettings.toBuilder(); + instantiateInlineWorkflowTemplateOperationSettings = + settings.instantiateInlineWorkflowTemplateOperationSettings.toBuilder(); + updateWorkflowTemplateSettings = settings.updateWorkflowTemplateSettings.toBuilder(); + listWorkflowTemplatesSettings = settings.listWorkflowTemplatesSettings.toBuilder(); + deleteWorkflowTemplateSettings = settings.deleteWorkflowTemplateSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( createWorkflowTemplateSettings, getWorkflowTemplateSettings, + instantiateWorkflowTemplateSettings, + instantiateInlineWorkflowTemplateSettings, updateWorkflowTemplateSettings, listWorkflowTemplatesSettings, deleteWorkflowTemplateSettings); - - initDefaults(this); } private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder - .instantiateWorkflowTemplateSettings() + .createWorkflowTemplateSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); builder - .instantiateInlineWorkflowTemplateSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .getWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_7_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_7_params")); builder - .createWorkflowTemplateSettings() + .instantiateWorkflowTemplateSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); builder - .getWorkflowTemplateSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_7_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_7_params")); + .instantiateInlineWorkflowTemplateSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); builder .updateWorkflowTemplateSettings() @@ -498,6 +514,7 @@ private static Builder initDefaults(Builder builder) { .deleteWorkflowTemplateSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + builder .instantiateWorkflowTemplateOperationSettings() .setInitialCallSettings( @@ -517,11 +534,12 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) 
.setTotalTimeout(Duration.ofMillis(43200000L)) .build())); + builder .instantiateInlineWorkflowTemplateOperationSettings() .setInitialCallSettings( @@ -541,44 +559,16 @@ private static Builder initDefaults(Builder builder) { .setInitialRetryDelay(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(2.0) .setMaxRetryDelay(Duration.ofMillis(10000L)) - .setInitialRpcTimeout(Duration.ZERO) // ignored - .setRpcTimeoutMultiplier(1.0) // ignored - .setMaxRpcTimeout(Duration.ZERO) // ignored + .setInitialRpcTimeout(Duration.ZERO) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ZERO) .setTotalTimeout(Duration.ofMillis(43200000L)) .build())); return builder; } - protected Builder(WorkflowTemplateServiceStubSettings settings) { - super(settings); - - instantiateWorkflowTemplateSettings = - settings.instantiateWorkflowTemplateSettings.toBuilder(); - instantiateWorkflowTemplateOperationSettings = - settings.instantiateWorkflowTemplateOperationSettings.toBuilder(); - instantiateInlineWorkflowTemplateSettings = - settings.instantiateInlineWorkflowTemplateSettings.toBuilder(); - instantiateInlineWorkflowTemplateOperationSettings = - settings.instantiateInlineWorkflowTemplateOperationSettings.toBuilder(); - createWorkflowTemplateSettings = settings.createWorkflowTemplateSettings.toBuilder(); - getWorkflowTemplateSettings = settings.getWorkflowTemplateSettings.toBuilder(); - updateWorkflowTemplateSettings = settings.updateWorkflowTemplateSettings.toBuilder(); - listWorkflowTemplatesSettings = settings.listWorkflowTemplatesSettings.toBuilder(); - deleteWorkflowTemplateSettings = settings.deleteWorkflowTemplateSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - instantiateWorkflowTemplateSettings, - instantiateInlineWorkflowTemplateSettings, - createWorkflowTemplateSettings, - getWorkflowTemplateSettings, - updateWorkflowTemplateSettings, - listWorkflowTemplatesSettings, - deleteWorkflowTemplateSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -594,6 +584,18 @@ public Builder applyToAllUnaryMethods( return unaryMethodSettingsBuilders; } + /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ + public UnaryCallSettings.Builder + createWorkflowTemplateSettings() { + return createWorkflowTemplateSettings; + } + + /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ + public UnaryCallSettings.Builder + getWorkflowTemplateSettings() { + return getWorkflowTemplateSettings; + } + /** Returns the builder for the settings used for calls to instantiateWorkflowTemplate. */ public UnaryCallSettings.Builder instantiateWorkflowTemplateSettings() { @@ -624,18 +626,6 @@ public Builder applyToAllUnaryMethods( return instantiateInlineWorkflowTemplateOperationSettings; } - /** Returns the builder for the settings used for calls to createWorkflowTemplate. */ - public UnaryCallSettings.Builder - createWorkflowTemplateSettings() { - return createWorkflowTemplateSettings; - } - - /** Returns the builder for the settings used for calls to getWorkflowTemplate. */ - public UnaryCallSettings.Builder - getWorkflowTemplateSettings() { - return getWorkflowTemplateSettings; - } - /** Returns the builder for the settings used for calls to updateWorkflowTemplate. 
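The operation settings in this hunk poll the returned long-running operation rather than timing out a single RPC, which is why the RPC-timeout fields are zeroed. A sketch of the polling schedule configured here (first poll after 1 s, doubling up to a 10 s cap, giving up after 12 h):

import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import org.threeten.bp.Duration;

OperationTimedPollAlgorithm pollAlgorithm =
    OperationTimedPollAlgorithm.create(
        RetrySettings.newBuilder()
            .setInitialRetryDelay(Duration.ofMillis(1000L)) // first poll after 1 s
            .setRetryDelayMultiplier(2.0)                   // back off 2x per poll
            .setMaxRetryDelay(Duration.ofMillis(10000L))    // never wait more than 10 s
            .setInitialRpcTimeout(Duration.ZERO)            // ignored when polling
            .setRpcTimeoutMultiplier(1.0)                   // ignored when polling
            .setMaxRpcTimeout(Duration.ZERO)                // ignored when polling
            .setTotalTimeout(Duration.ofMillis(43200000L))  // give up after 12 h
            .build());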
*/ public UnaryCallSettings.Builder updateWorkflowTemplateSettings() { diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClientTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClientTest.java index 7282abc0..f6e981c4 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClientTest.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1; import static com.google.cloud.dataproc.v1.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; @@ -27,12 +28,12 @@ import com.google.common.collect.Lists; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; -import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -40,42 +41,32 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class AutoscalingPolicyServiceClientTest { private static MockAutoscalingPolicyService mockAutoscalingPolicyService; - private static MockClusterController mockClusterController; - private static MockJobController mockJobController; - private static MockWorkflowTemplateService mockWorkflowTemplateService; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private AutoscalingPolicyServiceClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockAutoscalingPolicyService = new MockAutoscalingPolicyService(); - mockClusterController = new MockClusterController(); - mockJobController = new MockJobController(); - mockWorkflowTemplateService = new MockWorkflowTemplateService(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), - Arrays.asList( - mockAutoscalingPolicyService, - mockClusterController, - mockJobController, - mockWorkflowTemplateService)); - serviceHelper.start(); + Arrays.asList(mockAutoscalingPolicyService)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); AutoscalingPolicyServiceSettings settings = AutoscalingPolicyServiceSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -90,17 +81,20 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createAutoscalingPolicyTest() { - String id = "id3355"; - AutoscalingPolicyName name = - 
AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( - "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + public void createAutoscalingPolicyTest() throws Exception { AutoscalingPolicy expectedResponse = - AutoscalingPolicy.newBuilder().setId(id).setName(name.toString()).build(); + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); mockAutoscalingPolicyService.addResponse(expectedResponse); - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); AutoscalingPolicy actualResponse = client.createAutoscalingPolicy(parent, policy); @@ -109,9 +103,9 @@ public void createAutoscalingPolicyTest() { List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateAutoscalingPolicyRequest actualRequest = - (CreateAutoscalingPolicyRequest) actualRequests.get(0); + ((CreateAutoscalingPolicyRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(policy, actualRequest.getPolicy()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -120,31 +114,128 @@ public void createAutoscalingPolicyTest() { } @Test - @SuppressWarnings("all") public void createAutoscalingPolicyExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); + client.createAutoscalingPolicy(parent, policy); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+    }
+  }
+
+  @Test
+  public void createAutoscalingPolicyTest2() throws Exception {
+    AutoscalingPolicy expectedResponse =
+        AutoscalingPolicy.newBuilder()
+            .setId("id3355")
+            .setName(
+                AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName(
+                        "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]")
+                    .toString())
+            .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build())
+            .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build())
+            .build();
+    mockAutoscalingPolicyService.addResponse(expectedResponse);
+
+    RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+    AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
+
+    AutoscalingPolicy actualResponse = client.createAutoscalingPolicy(parent, policy);
+    Assert.assertEquals(expectedResponse, actualResponse);
+
+    List<AbstractMessage> actualRequests = mockAutoscalingPolicyService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    CreateAutoscalingPolicyRequest actualRequest =
+        ((CreateAutoscalingPolicyRequest) actualRequests.get(0));
+
+    Assert.assertEquals(parent.toString(), actualRequest.getParent());
+    Assert.assertEquals(policy, actualRequest.getPolicy());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void createAutoscalingPolicyExceptionTest2() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
     mockAutoscalingPolicyService.addException(exception);
 
     try {
       RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
       AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build();
+      client.createAutoscalingPolicy(parent, policy);
+      Assert.fail("No exception raised");
+    } catch (InvalidArgumentException e) {
+      // Expected exception.
+ } + } + + @Test + public void createAutoscalingPolicyTest3() throws Exception { + AutoscalingPolicy expectedResponse = + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); + AutoscalingPolicy actualResponse = client.createAutoscalingPolicy(parent, policy); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateAutoscalingPolicyRequest actualRequest = + ((CreateAutoscalingPolicyRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(policy, actualRequest.getPolicy()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createAutoscalingPolicyExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + String parent = "parent-995424086"; + AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); client.createAutoscalingPolicy(parent, policy); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void updateAutoscalingPolicyTest() { - String id = "id3355"; - AutoscalingPolicyName name = - AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( - "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + public void updateAutoscalingPolicyTest() throws Exception { AutoscalingPolicy expectedResponse = - AutoscalingPolicy.newBuilder().setId(id).setName(name.toString()).build(); + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); mockAutoscalingPolicyService.addResponse(expectedResponse); AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); @@ -155,7 +246,7 @@ public void updateAutoscalingPolicyTest() { List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateAutoscalingPolicyRequest actualRequest = - (UpdateAutoscalingPolicyRequest) actualRequests.get(0); + ((UpdateAutoscalingPolicyRequest) actualRequests.get(0)); Assert.assertEquals(policy, actualRequest.getPolicy()); Assert.assertTrue( @@ -165,30 +256,31 @@ public void updateAutoscalingPolicyTest() { } @Test - @SuppressWarnings("all") public void updateAutoscalingPolicyExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); - client.updateAutoscalingPolicy(policy); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
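+      // Editorial note: MockAutoscalingPolicyService replays the queued
+      // INVALID_ARGUMENT status for the next call regardless of payload, so even
+      // this empty policy reproduces the error path deterministically.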
} } @Test - @SuppressWarnings("all") - public void getAutoscalingPolicyTest() { - String id = "id3355"; - AutoscalingPolicyName name2 = - AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( - "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + public void getAutoscalingPolicyTest() throws Exception { AutoscalingPolicy expectedResponse = - AutoscalingPolicy.newBuilder().setId(id).setName(name2.toString()).build(); + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); mockAutoscalingPolicyService.addResponse(expectedResponse); AutoscalingPolicyName name = @@ -200,9 +292,10 @@ public void getAutoscalingPolicyTest() { List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetAutoscalingPolicyRequest actualRequest = (GetAutoscalingPolicyRequest) actualRequests.get(0); + GetAutoscalingPolicyRequest actualRequest = + ((GetAutoscalingPolicyRequest) actualRequests.get(0)); - Assert.assertEquals(name, AutoscalingPolicyName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -210,50 +303,91 @@ public void getAutoscalingPolicyTest() { } @Test - @SuppressWarnings("all") public void getAutoscalingPolicyExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + client.getAutoscalingPolicy(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getAutoscalingPolicyTest2() throws Exception { + AutoscalingPolicy expectedResponse = + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + String name = "name3373707"; + + AutoscalingPolicy actualResponse = client.getAutoscalingPolicy(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetAutoscalingPolicyRequest actualRequest = + ((GetAutoscalingPolicyRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + @Test + public void getAutoscalingPolicyExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + String name = "name3373707"; client.getAutoscalingPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void listAutoscalingPoliciesTest() { - String nextPageToken = ""; - AutoscalingPolicy policiesElement = AutoscalingPolicy.newBuilder().build(); - List policies = Arrays.asList(policiesElement); + public void listAutoscalingPoliciesTest() throws Exception { + AutoscalingPolicy responsesElement = AutoscalingPolicy.newBuilder().build(); ListAutoscalingPoliciesResponse expectedResponse = ListAutoscalingPoliciesResponse.newBuilder() - .setNextPageToken(nextPageToken) - .addAllPolicies(policies) + .setNextPageToken("") + .addAllPolicies(Arrays.asList(responsesElement)) .build(); mockAutoscalingPolicyService.addResponse(expectedResponse); - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListAutoscalingPoliciesPagedResponse pagedListResponse = client.listAutoscalingPolicies(parent); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getPoliciesList().get(0), resources.get(0)); List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListAutoscalingPoliciesRequest actualRequest = - (ListAutoscalingPoliciesRequest) actualRequests.get(0); + ((ListAutoscalingPoliciesRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -261,24 +395,111 @@ public void listAutoscalingPoliciesTest() { } @Test - @SuppressWarnings("all") public void listAutoscalingPoliciesExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + client.listAutoscalingPolicies(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listAutoscalingPoliciesTest2() throws Exception { + AutoscalingPolicy responsesElement = AutoscalingPolicy.newBuilder().build(); + ListAutoscalingPoliciesResponse expectedResponse = + ListAutoscalingPoliciesResponse.newBuilder() + .setNextPageToken("") + .addAllPolicies(Arrays.asList(responsesElement)) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + + ListAutoscalingPoliciesPagedResponse pagedListResponse = client.listAutoscalingPolicies(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getPoliciesList().get(0), resources.get(0)); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListAutoscalingPoliciesRequest actualRequest = + ((ListAutoscalingPoliciesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listAutoscalingPoliciesExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + client.listAutoscalingPolicies(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
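+      // Added for clarity (not generator output): paged surfaces fetch the first
+      // page eagerly, so the InvalidArgumentException is thrown by
+      // listAutoscalingPolicies(parent) itself rather than during iteration.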
+ } + } + + @Test + public void listAutoscalingPoliciesTest3() throws Exception { + AutoscalingPolicy responsesElement = AutoscalingPolicy.newBuilder().build(); + ListAutoscalingPoliciesResponse expectedResponse = + ListAutoscalingPoliciesResponse.newBuilder() + .setNextPageToken("") + .addAllPolicies(Arrays.asList(responsesElement)) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListAutoscalingPoliciesPagedResponse pagedListResponse = client.listAutoscalingPolicies(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getPoliciesList().get(0), resources.get(0)); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListAutoscalingPoliciesRequest actualRequest = + ((ListAutoscalingPoliciesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listAutoscalingPoliciesExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + String parent = "parent-995424086"; client.listAutoscalingPolicies(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void deleteAutoscalingPolicyTest() { + public void deleteAutoscalingPolicyTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockAutoscalingPolicyService.addResponse(expectedResponse); @@ -291,9 +512,9 @@ public void deleteAutoscalingPolicyTest() { List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteAutoscalingPolicyRequest actualRequest = - (DeleteAutoscalingPolicyRequest) actualRequests.get(0); + ((DeleteAutoscalingPolicyRequest) actualRequests.get(0)); - Assert.assertEquals(name, AutoscalingPolicyName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -301,20 +522,53 @@ public void deleteAutoscalingPolicyTest() { } @Test - @SuppressWarnings("all") public void deleteAutoscalingPolicyExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + client.deleteAutoscalingPolicy(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + @Test + public void deleteAutoscalingPolicyTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + String name = "name3373707"; + + client.deleteAutoscalingPolicy(name); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteAutoscalingPolicyRequest actualRequest = + ((DeleteAutoscalingPolicyRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteAutoscalingPolicyExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + String name = "name3373707"; client.deleteAutoscalingPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java index 8c1163ce..40d059f8 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1; import static com.google.cloud.dataproc.v1.ClusterControllerClient.ListClustersPagedResponse; @@ -31,13 +32,15 @@ import com.google.protobuf.Any; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; -import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -45,42 +48,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class ClusterControllerClientTest { - private static MockAutoscalingPolicyService mockAutoscalingPolicyService; private static MockClusterController mockClusterController; - private static MockJobController mockJobController; - private static MockWorkflowTemplateService mockWorkflowTemplateService; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private ClusterControllerClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { - mockAutoscalingPolicyService = new MockAutoscalingPolicyService(); mockClusterController = new MockClusterController(); - mockJobController = new MockJobController(); - mockWorkflowTemplateService = new MockWorkflowTemplateService(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), - Arrays.asList( - mockAutoscalingPolicyService, - mockClusterController, - mockJobController, - mockWorkflowTemplateService)); - serviceHelper.start(); + UUID.randomUUID().toString(), Arrays.asList(mockClusterController)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); ClusterControllerSettings settings = ClusterControllerSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -95,16 +87,17 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") public void createClusterTest() throws Exception { - String projectId2 = "projectId2939242356"; - String clusterName = "clusterName-1018081872"; - String clusterUuid = "clusterUuid-1017854240"; Cluster expectedResponse = Cluster.newBuilder() - .setProjectId(projectId2) - .setClusterName(clusterName) - .setClusterUuid(clusterUuid) + .setProjectId("projectId-894832108") + .setClusterName("clusterName-1141738587") + .setConfig(ClusterConfig.newBuilder().build()) + .putAllLabels(new HashMap()) + .setStatus(ClusterStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .setClusterUuid("clusterUuid-1141510955") + .setMetrics(ClusterMetrics.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() @@ -114,7 +107,7 @@ public void createClusterTest() throws Exception { .build(); mockClusterController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Cluster cluster = Cluster.newBuilder().build(); @@ -123,7 +116,7 @@ public void 
createClusterTest() throws Exception { List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateClusterRequest actualRequest = (CreateClusterRequest) actualRequests.get(0); + CreateClusterRequest actualRequest = ((CreateClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -135,36 +128,35 @@ public void createClusterTest() throws Exception { } @Test - @SuppressWarnings("all") public void createClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Cluster cluster = Cluster.newBuilder().build(); - client.createClusterAsync(projectId, region, cluster).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") public void updateClusterTest() throws Exception { - String projectId2 = "projectId2939242356"; - String clusterName2 = "clusterName2875867491"; - String clusterUuid = "clusterUuid-1017854240"; Cluster expectedResponse = Cluster.newBuilder() - .setProjectId(projectId2) - .setClusterName(clusterName2) - .setClusterUuid(clusterUuid) + .setProjectId("projectId-894832108") + .setClusterName("clusterName-1141738587") + .setConfig(ClusterConfig.newBuilder().build()) + .putAllLabels(new HashMap()) + .setStatus(ClusterStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .setClusterUuid("clusterUuid-1141510955") + .setMetrics(ClusterMetrics.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() @@ -174,9 +166,9 @@ public void updateClusterTest() throws Exception { .build(); mockClusterController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; + String clusterName = "clusterName-1141738587"; Cluster cluster = Cluster.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); @@ -186,7 +178,7 @@ public void updateClusterTest() throws Exception { List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - UpdateClusterRequest actualRequest = (UpdateClusterRequest) actualRequests.get(0); + UpdateClusterRequest actualRequest = ((UpdateClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -200,29 +192,26 @@ public void updateClusterTest() throws Exception { } @Test - @SuppressWarnings("all") public void updateClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); 
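+    // Comment added editorially: gax translates an io.grpc.Status.INVALID_ARGUMENT
+    // trailer into InvalidArgumentException; for long-running methods it arrives
+    // wrapped in the ExecutionException thrown by get(), as asserted below.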
mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; + String clusterName = "clusterName-1141738587"; Cluster cluster = Cluster.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); - client.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") public void deleteClusterTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = @@ -233,16 +222,15 @@ public void deleteClusterTest() throws Exception { .build(); mockClusterController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; + String clusterName = "clusterName-1141738587"; - Empty actualResponse = client.deleteClusterAsync(projectId, region, clusterName).get(); - Assert.assertEquals(expectedResponse, actualResponse); + client.deleteClusterAsync(projectId, region, clusterName).get(); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - DeleteClusterRequest actualRequest = (DeleteClusterRequest) actualRequests.get(0); + DeleteClusterRequest actualRequest = ((DeleteClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -254,50 +242,48 @@ public void deleteClusterTest() throws Exception { } @Test - @SuppressWarnings("all") public void deleteClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; - + String clusterName = "clusterName-1141738587"; client.deleteClusterAsync(projectId, region, clusterName).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void diagnoseClusterTest() throws Exception { - String outputUri = "outputUri-1273518802"; - DiagnoseClusterResults expectedResponse = - DiagnoseClusterResults.newBuilder().setOutputUri(outputUri).build(); - Operation resultOperation = - Operation.newBuilder() - .setName("diagnoseClusterTest") - .setDone(true) - .setResponse(Any.pack(expectedResponse)) + public void getClusterTest() 
throws Exception { + Cluster expectedResponse = + Cluster.newBuilder() + .setProjectId("projectId-894832108") + .setClusterName("clusterName-1141738587") + .setConfig(ClusterConfig.newBuilder().build()) + .putAllLabels(new HashMap()) + .setStatus(ClusterStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .setClusterUuid("clusterUuid-1141510955") + .setMetrics(ClusterMetrics.newBuilder().build()) .build(); - mockClusterController.addResponse(resultOperation); + mockClusterController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; + String clusterName = "clusterName-1141738587"; - DiagnoseClusterResults actualResponse = - client.diagnoseClusterAsync(projectId, region, clusterName).get(); + Cluster actualResponse = client.getCluster(projectId, region, clusterName); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - DiagnoseClusterRequest actualRequest = (DiagnoseClusterRequest) actualRequests.get(0); + GetClusterRequest actualRequest = ((GetClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -309,53 +295,47 @@ public void diagnoseClusterTest() throws Exception { } @Test - @SuppressWarnings("all") - public void diagnoseClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void getClusterExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; - - client.diagnoseClusterAsync(projectId, region, clusterName).get(); + String clusterName = "clusterName-1141738587"; + client.getCluster(projectId, region, clusterName); Assert.fail("No exception raised"); - } catch (ExecutionException e) { - Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); - Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } catch (InvalidArgumentException e) { + // Expected exception. 
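+      // Editorial note: unlike the *Async long-running calls above, the
+      // synchronous getCluster surfaces the mapped InvalidArgumentException
+      // directly, with no ExecutionException wrapper to unwrap.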
} } @Test - @SuppressWarnings("all") - public void getClusterTest() { - String projectId2 = "projectId2939242356"; - String clusterName2 = "clusterName2875867491"; - String clusterUuid = "clusterUuid-1017854240"; - Cluster expectedResponse = - Cluster.newBuilder() - .setProjectId(projectId2) - .setClusterName(clusterName2) - .setClusterUuid(clusterUuid) + public void listClustersTest() throws Exception { + Cluster responsesElement = Cluster.newBuilder().build(); + ListClustersResponse expectedResponse = + ListClustersResponse.newBuilder() + .setNextPageToken("") + .addAllClusters(Arrays.asList(responsesElement)) .build(); mockClusterController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; - Cluster actualResponse = client.getCluster(projectId, region, clusterName); - Assert.assertEquals(expectedResponse, actualResponse); + ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getClustersList().get(0), resources.get(0)); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetClusterRequest actualRequest = (GetClusterRequest) actualRequests.get(0); + ListClustersRequest actualRequest = ((ListClustersRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); - Assert.assertEquals(clusterName, actualRequest.getClusterName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -363,51 +343,48 @@ public void getClusterTest() { } @Test - @SuppressWarnings("all") - public void getClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void listClustersExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; - - client.getCluster(projectId, region, clusterName); + client.listClusters(projectId, region); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void listClustersTest() { - String nextPageToken = ""; - Cluster clustersElement = Cluster.newBuilder().build(); - List clusters = Arrays.asList(clustersElement); + public void listClustersTest2() throws Exception { + Cluster responsesElement = Cluster.newBuilder().build(); ListClustersResponse expectedResponse = ListClustersResponse.newBuilder() - .setNextPageToken(nextPageToken) - .addAllClusters(clusters) + .setNextPageToken("") + .addAllClusters(Arrays.asList(responsesElement)) .build(); mockClusterController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; + String filter = "filter-1274492040"; - ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region); + ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region, filter); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getClustersList().get(0), resources.get(0)); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - ListClustersRequest actualRequest = (ListClustersRequest) actualRequests.get(0); + ListClustersRequest actualRequest = ((ListClustersRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(filter, actualRequest.getFilter()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -415,50 +392,48 @@ public void listClustersTest() { } @Test - @SuppressWarnings("all") - public void listClustersExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void listClustersExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - - client.listClusters(projectId, region); + String filter = "filter-1274492040"; + client.listClusters(projectId, region, filter); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void listClustersTest2() { - String nextPageToken = ""; - Cluster clustersElement = Cluster.newBuilder().build(); - List clusters = Arrays.asList(clustersElement); - ListClustersResponse expectedResponse = - ListClustersResponse.newBuilder() - .setNextPageToken(nextPageToken) - .addAllClusters(clusters) + public void diagnoseClusterTest() throws Exception { + DiagnoseClusterResults expectedResponse = + DiagnoseClusterResults.newBuilder().setOutputUri("outputUri-2119300949").build(); + Operation resultOperation = + Operation.newBuilder() + .setName("diagnoseClusterTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) .build(); - mockClusterController.addResponse(expectedResponse); + mockClusterController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; + String clusterName = "clusterName-1141738587"; - ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region); - - List resources = Lists.newArrayList(pagedListResponse.iterateAll()); - Assert.assertEquals(1, resources.size()); - Assert.assertEquals(expectedResponse.getClustersList().get(0), resources.get(0)); + DiagnoseClusterResults actualResponse = + client.diagnoseClusterAsync(projectId, region, clusterName).get(); + Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - ListClustersRequest actualRequest = (ListClustersRequest) actualRequests.get(0); + DiagnoseClusterRequest actualRequest = ((DiagnoseClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(clusterName, actualRequest.getClusterName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -466,19 +441,20 @@ public void listClustersTest2() { } @Test - @SuppressWarnings("all") - public void listClustersExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void diagnoseClusterExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - - client.listClusters(projectId, region); + String clusterName = "clusterName-1141738587"; + client.diagnoseClusterAsync(projectId, region, clusterName).get(); Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerSmokeTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerSmokeTest.java deleted file mode 100644 index 99a96095..00000000 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerSmokeTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * 
Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.cloud.dataproc.v1; - -import static com.google.cloud.dataproc.v1.ClusterControllerClient.ListClustersPagedResponse; - -import com.google.common.base.Preconditions; -import java.util.logging.Level; -import java.util.logging.Logger; -import org.junit.Test; - -@javax.annotation.Generated("by GAPIC") -public class ClusterControllerSmokeTest { - private static final String PROJECT_ENV_NAME = "GOOGLE_CLOUD_PROJECT"; - private static final String LEGACY_PROJECT_ENV_NAME = "GCLOUD_PROJECT"; - - @Test - public void run() { - main(null); - } - - public static void main(String args[]) { - Logger.getLogger("").setLevel(Level.WARNING); - try { - executeNoCatch(getProjectId()); - System.out.println("OK"); - } catch (Exception e) { - System.err.println("Failed with exception:"); - e.printStackTrace(System.err); - System.exit(1); - } - } - - public static void executeNoCatch(String projectId) throws Exception { - try (ClusterControllerClient client = ClusterControllerClient.create()) { - String projectId2 = projectId; - String region = "global"; - - ListClustersPagedResponse pagedResponse = client.listClusters(projectId2, region); - } - } - - private static String getProjectId() { - String projectId = System.getProperty(PROJECT_ENV_NAME, System.getenv(PROJECT_ENV_NAME)); - if (projectId == null) { - projectId = - System.getProperty(LEGACY_PROJECT_ENV_NAME, System.getenv(LEGACY_PROJECT_ENV_NAME)); - } - Preconditions.checkArgument(projectId != null, "A project ID is required."); - return projectId; - } -} diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java index 1999bf0f..6cf661b9 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1; import static com.google.cloud.dataproc.v1.JobControllerClient.ListJobsPagedResponse; @@ -31,13 +32,15 @@ import com.google.protobuf.Any; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; -import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -45,42 +48,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class JobControllerClientTest { - private static MockAutoscalingPolicyService mockAutoscalingPolicyService; - private static MockClusterController mockClusterController; - private static MockJobController mockJobController; - private static MockWorkflowTemplateService mockWorkflowTemplateService; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private JobControllerClient client; + private static MockJobController mockJobController; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { - mockAutoscalingPolicyService = new MockAutoscalingPolicyService(); - mockClusterController = new MockClusterController(); mockJobController = new MockJobController(); - mockWorkflowTemplateService = new MockWorkflowTemplateService(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), - Arrays.asList( - mockAutoscalingPolicyService, - mockClusterController, - mockJobController, - mockWorkflowTemplateService)); - serviceHelper.start(); + UUID.randomUUID().toString(), Arrays.asList(mockJobController)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); JobControllerSettings settings = JobControllerSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -95,22 +87,24 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void submitJobTest() { - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; + public void submitJobTest() throws Exception { Job expectedResponse = Job.newBuilder() - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); mockJobController.addResponse(expectedResponse); - String projectId = 
"projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Job job = Job.newBuilder().build(); @@ -119,7 +113,7 @@ public void submitJobTest() { List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SubmitJobRequest actualRequest = (SubmitJobRequest) actualRequests.get(0); + SubmitJobRequest actualRequest = ((SubmitJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -131,36 +125,36 @@ public void submitJobTest() { } @Test - @SuppressWarnings("all") public void submitJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Job job = Job.newBuilder().build(); - client.submitJob(projectId, region, job); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") public void submitJobAsOperationTest() throws Exception { - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; Job expectedResponse = Job.newBuilder() - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); Operation resultOperation = Operation.newBuilder() @@ -170,7 +164,7 @@ public void submitJobAsOperationTest() throws Exception { .build(); mockJobController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Job job = Job.newBuilder().build(); @@ -179,7 +173,7 @@ public void submitJobAsOperationTest() throws Exception { List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SubmitJobRequest actualRequest = (SubmitJobRequest) actualRequests.get(0); + SubmitJobRequest actualRequest = ((SubmitJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -191,51 +185,51 @@ public void submitJobAsOperationTest() throws Exception { } @Test - @SuppressWarnings("all") public void submitJobAsOperationExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = 
"projectId-894832108"; String region = "region-934795532"; Job job = Job.newBuilder().build(); - client.submitJobAsOperationAsync(projectId, region, job).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void getJobTest() { - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; + public void getJobTest() throws Exception { Job expectedResponse = Job.newBuilder() - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; + String jobId = "jobId101296568"; Job actualResponse = client.getJob(projectId, region, jobId); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetJobRequest actualRequest = (GetJobRequest) actualRequests.get(0); + GetJobRequest actualRequest = ((GetJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -247,45 +241,44 @@ public void getJobTest() { } @Test - @SuppressWarnings("all") public void getJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; - + String jobId = "jobId101296568"; client.getJob(projectId, region, jobId); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void listJobsTest() { - String nextPageToken = ""; - Job jobsElement = Job.newBuilder().build(); - List jobs = Arrays.asList(jobsElement); + public void listJobsTest() throws Exception { + Job responsesElement = Job.newBuilder().build(); ListJobsResponse expectedResponse = - ListJobsResponse.newBuilder().setNextPageToken(nextPageToken).addAllJobs(jobs).build(); + ListJobsResponse.newBuilder() + .setNextPageToken("") + .addAllJobs(Arrays.asList(responsesElement)) + .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; ListJobsPagedResponse pagedListResponse = client.listJobs(projectId, region); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0)); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - ListJobsRequest actualRequest = (ListJobsRequest) actualRequests.get(0); + ListJobsRequest actualRequest = ((ListJobsRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -296,47 +289,48 @@ public void listJobsTest() { } @Test - @SuppressWarnings("all") public void listJobsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - client.listJobs(projectId, region); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void listJobsTest2() { - String nextPageToken = ""; - Job jobsElement = Job.newBuilder().build(); - List jobs = Arrays.asList(jobsElement); + public void listJobsTest2() throws Exception { + Job responsesElement = Job.newBuilder().build(); ListJobsResponse expectedResponse = - ListJobsResponse.newBuilder().setNextPageToken(nextPageToken).addAllJobs(jobs).build(); + ListJobsResponse.newBuilder() + .setNextPageToken("") + .addAllJobs(Arrays.asList(responsesElement)) + .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; + String filter = "filter-1274492040"; - ListJobsPagedResponse pagedListResponse = client.listJobs(projectId, region); + ListJobsPagedResponse pagedListResponse = client.listJobs(projectId, region, filter); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0)); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - ListJobsRequest actualRequest = (ListJobsRequest) actualRequests.get(0); + ListJobsRequest actualRequest = ((ListJobsRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(filter, actualRequest.getFilter()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -344,50 +338,46 @@ public void listJobsTest2() { } @Test - @SuppressWarnings("all") public void listJobsExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - - client.listJobs(projectId, region); + String filter = "filter-1274492040"; + client.listJobs(projectId, region, filter); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void updateJobTest() { - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; + public void updateJobTest() throws Exception { Job expectedResponse = Job.newBuilder() - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; - String region = "region-934795532"; - String jobId = "jobId-1154752291"; - Job job = Job.newBuilder().build(); - FieldMask updateMask = FieldMask.newBuilder().build(); UpdateJobRequest request = UpdateJobRequest.newBuilder() - .setProjectId(projectId) - .setRegion(region) - .setJobId(jobId) - .setJob(job) - .setUpdateMask(updateMask) + .setProjectId("projectId-894832108") + .setRegion("region-934795532") + .setJobId("jobId101296568") + .setJob(Job.newBuilder().build()) + .setUpdateMask(FieldMask.newBuilder().build()) .build(); Job actualResponse = client.updateJob(request); @@ -395,13 +385,13 @@ public void updateJobTest() { List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - UpdateJobRequest actualRequest = (UpdateJobRequest) actualRequests.get(0); + UpdateJobRequest actualRequest = ((UpdateJobRequest) actualRequests.get(0)); - Assert.assertEquals(projectId, actualRequest.getProjectId()); - Assert.assertEquals(region, actualRequest.getRegion()); - Assert.assertEquals(jobId, actualRequest.getJobId()); - Assert.assertEquals(job, actualRequest.getJob()); - Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); + Assert.assertEquals(request.getProjectId(), actualRequest.getProjectId()); + Assert.assertEquals(request.getRegion(), actualRequest.getRegion()); + Assert.assertEquals(request.getJobId(), actualRequest.getJobId()); + Assert.assertEquals(request.getJob(), actualRequest.getJob()); + Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -409,59 +399,54 @@ public void updateJobTest() { } @Test - @SuppressWarnings("all") public void updateJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; - String region = "region-934795532"; - String jobId = "jobId-1154752291"; - Job job = Job.newBuilder().build(); - FieldMask updateMask = FieldMask.newBuilder().build(); UpdateJobRequest request = UpdateJobRequest.newBuilder() - .setProjectId(projectId) - .setRegion(region) - .setJobId(jobId) - .setJob(job) - .setUpdateMask(updateMask) + 
.setProjectId("projectId-894832108") + .setRegion("region-934795532") + .setJobId("jobId101296568") + .setJob(Job.newBuilder().build()) + .setUpdateMask(FieldMask.newBuilder().build()) .build(); - client.updateJob(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void cancelJobTest() { - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; + public void cancelJobTest() throws Exception { Job expectedResponse = Job.newBuilder() - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; + String jobId = "jobId101296568"; Job actualResponse = client.cancelJob(projectId, region, jobId); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CancelJobRequest actualRequest = (CancelJobRequest) actualRequests.get(0); + CancelJobRequest actualRequest = ((CancelJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -473,38 +458,35 @@ public void cancelJobTest() { } @Test - @SuppressWarnings("all") public void cancelJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; - + String jobId = "jobId101296568"; client.cancelJob(projectId, region, jobId); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void deleteJobTest() { + public void deleteJobTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; + String jobId = "jobId101296568"; client.deleteJob(projectId, region, jobId); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - DeleteJobRequest actualRequest = (DeleteJobRequest) actualRequests.get(0); + DeleteJobRequest actualRequest = ((DeleteJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -516,20 +498,18 @@ public void deleteJobTest() { } @Test - @SuppressWarnings("all") public void deleteJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; - + String jobId = "jobId101296568"; client.deleteJob(projectId, region, jobId); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java index 133ec7bf..a15d9f48 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockAutoscalingPolicyService implements MockGrpcService { private final MockAutoscalingPolicyServiceImpl serviceImpl; diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java index 35daafac..61a4bd95 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
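// Illustrative recap (assumes the fixture names above; not generated code): every
// exception test in this file follows one shape. A gRPC StatusRuntimeException queued
// on the mock must surface through the GAX layer as the matching ApiException subtype,
// here InvalidArgumentException.
mockJobController.addException(new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT));
try {
  client.deleteJob("projectId-894832108", "region-934795532", "jobId101296568");
  Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
  // INVALID_ARGUMENT is translated by GAX before reaching the caller.
}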
diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java
index 133ec7bf..a15d9f48 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.BetaApi;
@@ -20,9 +21,10 @@
 import com.google.protobuf.AbstractMessage;
 import io.grpc.ServerServiceDefinition;
 import java.util.List;
+import javax.annotation.Generated;

-@javax.annotation.Generated("by GAPIC")
 @BetaApi
+@Generated("by gapic-generator-java")
 public class MockAutoscalingPolicyService implements MockGrpcService {
   private final MockAutoscalingPolicyServiceImpl serviceImpl;

diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java
index 35daafac..61a4bd95 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.BetaApi;
@@ -24,9 +25,10 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
+import javax.annotation.Generated;

-@javax.annotation.Generated("by GAPIC")
 @BetaApi
+@Generated("by gapic-generator-java")
 public class MockAutoscalingPolicyServiceImpl extends AutoscalingPolicyServiceImplBase {
   private List<AbstractMessage> requests;
   private Queue<Object> responses;
@@ -63,10 +65,10 @@ public void createAutoscalingPolicy(
     Object response = responses.remove();
     if (response instanceof AutoscalingPolicy) {
       requests.add(request);
-      responseObserver.onNext((AutoscalingPolicy) response);
+      responseObserver.onNext(((AutoscalingPolicy) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -78,10 +80,10 @@ public void updateAutoscalingPolicy(
     Object response = responses.remove();
     if (response instanceof AutoscalingPolicy) {
       requests.add(request);
-      responseObserver.onNext((AutoscalingPolicy) response);
+      responseObserver.onNext(((AutoscalingPolicy) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -93,10 +95,10 @@ public void getAutoscalingPolicy(
     Object response = responses.remove();
     if (response instanceof AutoscalingPolicy) {
       requests.add(request);
-      responseObserver.onNext((AutoscalingPolicy) response);
+      responseObserver.onNext(((AutoscalingPolicy) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -109,10 +111,10 @@ public void listAutoscalingPolicies(
     Object response = responses.remove();
     if (response instanceof ListAutoscalingPoliciesResponse) {
       requests.add(request);
-      responseObserver.onNext((ListAutoscalingPoliciesResponse) response);
+      responseObserver.onNext(((ListAutoscalingPoliciesResponse) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -124,10 +126,10 @@ public void deleteAutoscalingPolicy(
     Object response = responses.remove();
     if (response instanceof Empty) {
       requests.add(request);
-      responseObserver.onNext((Empty) response);
+      responseObserver.onNext(((Empty) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockClusterController.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockClusterController.java
index cf367dc6..57479579 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockClusterController.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockClusterController.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.BetaApi;
@@ -20,9 +21,10 @@
 import com.google.protobuf.AbstractMessage;
 import io.grpc.ServerServiceDefinition;
 import java.util.List;
+import javax.annotation.Generated;

-@javax.annotation.Generated("by GAPIC")
 @BetaApi
+@Generated("by gapic-generator-java")
 public class MockClusterController implements MockGrpcService {
   private final MockClusterControllerImpl serviceImpl;

diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockClusterControllerImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockClusterControllerImpl.java
index cf3f06ae..de4fcbdf 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockClusterControllerImpl.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockClusterControllerImpl.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.BetaApi;
@@ -24,9 +25,10 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
+import javax.annotation.Generated;

-@javax.annotation.Generated("by GAPIC")
 @BetaApi
+@Generated("by gapic-generator-java")
 public class MockClusterControllerImpl extends ClusterControllerImplBase {
   private List<AbstractMessage> requests;
   private Queue<Object> responses;
@@ -63,10 +65,10 @@ public void createCluster(
     Object response = responses.remove();
     if (response instanceof Operation) {
       requests.add(request);
-      responseObserver.onNext((Operation) response);
+      responseObserver.onNext(((Operation) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -78,10 +80,10 @@ public void updateCluster(
     Object response = responses.remove();
     if (response instanceof Operation) {
       requests.add(request);
-      responseObserver.onNext((Operation) response);
+      responseObserver.onNext(((Operation) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -93,10 +95,10 @@ public void deleteCluster(
     Object response = responses.remove();
     if (response instanceof Operation) {
       requests.add(request);
-      responseObserver.onNext((Operation) response);
+      responseObserver.onNext(((Operation) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -107,10 +109,10 @@ public void getCluster(GetClusterRequest request, StreamObserver<Cluster> respon
     Object response = responses.remove();
     if (response instanceof Cluster) {
       requests.add(request);
-      responseObserver.onNext((Cluster) response);
+      responseObserver.onNext(((Cluster) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -122,10 +124,10 @@ public void listClusters(
     Object response = responses.remove();
     if (response instanceof ListClustersResponse) {
       requests.add(request);
-      responseObserver.onNext((ListClustersResponse) response);
+      responseObserver.onNext(((ListClustersResponse) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -137,10 +139,10 @@ public void diagnoseCluster(
     Object response = responses.remove();
     if (response instanceof Operation) {
       requests.add(request);
-      responseObserver.onNext((Operation) response);
+      responseObserver.onNext(((Operation) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }

diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockJobController.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockJobController.java
index 03c81db9..da1f93da 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockJobController.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockJobController.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.BetaApi;
@@ -20,9 +21,10 @@
 import com.google.protobuf.AbstractMessage;
 import io.grpc.ServerServiceDefinition;
 import java.util.List;
+import javax.annotation.Generated;

-@javax.annotation.Generated("by GAPIC")
 @BetaApi
+@Generated("by gapic-generator-java")
 public class MockJobController implements MockGrpcService {
   private final MockJobControllerImpl serviceImpl;

diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockJobControllerImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockJobControllerImpl.java
index 93f5ed54..448c74a4 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockJobControllerImpl.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockJobControllerImpl.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.BetaApi;
@@ -25,9 +26,10 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
+import javax.annotation.Generated;

-@javax.annotation.Generated("by GAPIC")
 @BetaApi
+@Generated("by gapic-generator-java")
 public class MockJobControllerImpl extends JobControllerImplBase {
   private List<AbstractMessage> requests;
   private Queue<Object> responses;
@@ -63,10 +65,10 @@ public void submitJob(SubmitJobRequest request, StreamObserver<Job> responseObse
     Object response = responses.remove();
     if (response instanceof Job) {
       requests.add(request);
-      responseObserver.onNext((Job) response);
+      responseObserver.onNext(((Job) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -78,10 +80,10 @@ public void submitJobAsOperation(
     Object response = responses.remove();
     if (response instanceof Operation) {
       requests.add(request);
-      responseObserver.onNext((Operation) response);
+      responseObserver.onNext(((Operation) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -92,10 +94,10 @@ public void getJob(GetJobRequest request, StreamObserver<Job> responseObserver)
     Object response = responses.remove();
     if (response instanceof Job) {
       requests.add(request);
-      responseObserver.onNext((Job) response);
+      responseObserver.onNext(((Job) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -106,10 +108,10 @@ public void listJobs(ListJobsRequest request, StreamObserver<ListJobsResponse> r
     Object response = responses.remove();
     if (response instanceof ListJobsResponse) {
       requests.add(request);
-      responseObserver.onNext((ListJobsResponse) response);
+      responseObserver.onNext(((ListJobsResponse) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -120,10 +122,10 @@ public void updateJob(UpdateJobRequest request, StreamObserver<Job> responseObse
     Object response = responses.remove();
     if (response instanceof Job) {
       requests.add(request);
-      responseObserver.onNext((Job) response);
+      responseObserver.onNext(((Job) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -134,10 +136,10 @@ public void cancelJob(CancelJobRequest request, StreamObserver<Job> responseObse
     Object response = responses.remove();
     if (response instanceof Job) {
       requests.add(request);
-      responseObserver.onNext((Job) response);
+      responseObserver.onNext(((Job) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -148,10 +150,10 @@ public void deleteJob(DeleteJobRequest request, StreamObserver<Empty> responseOb
     Object response = responses.remove();
     if (response instanceof Empty) {
       requests.add(request);
-      responseObserver.onNext((Empty) response);
+      responseObserver.onNext(((Empty) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }

diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateService.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateService.java
index 451a7b0b..d42a4daf 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateService.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateService.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.BetaApi;
@@ -20,9 +21,10 @@
 import com.google.protobuf.AbstractMessage;
 import io.grpc.ServerServiceDefinition;
 import java.util.List;
+import javax.annotation.Generated;

-@javax.annotation.Generated("by GAPIC")
 @BetaApi
+@Generated("by gapic-generator-java")
 public class MockWorkflowTemplateService implements MockGrpcService {
   private final MockWorkflowTemplateServiceImpl serviceImpl;

diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateServiceImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateServiceImpl.java
index 74dbcc07..ee09193d 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateServiceImpl.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockWorkflowTemplateServiceImpl.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import com.google.api.core.BetaApi;
@@ -25,9 +26,10 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
+import javax.annotation.Generated;

-@javax.annotation.Generated("by GAPIC")
 @BetaApi
+@Generated("by gapic-generator-java")
 public class MockWorkflowTemplateServiceImpl extends WorkflowTemplateServiceImplBase {
   private List<AbstractMessage> requests;
   private Queue<Object> responses;
@@ -64,10 +66,10 @@ public void createWorkflowTemplate(
     Object response = responses.remove();
     if (response instanceof WorkflowTemplate) {
       requests.add(request);
-      responseObserver.onNext((WorkflowTemplate) response);
+      responseObserver.onNext(((WorkflowTemplate) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -79,10 +81,10 @@ public void getWorkflowTemplate(
     Object response = responses.remove();
     if (response instanceof WorkflowTemplate) {
       requests.add(request);
-      responseObserver.onNext((WorkflowTemplate) response);
+      responseObserver.onNext(((WorkflowTemplate) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -94,10 +96,10 @@ public void instantiateWorkflowTemplate(
     Object response = responses.remove();
     if (response instanceof Operation) {
       requests.add(request);
-      responseObserver.onNext((Operation) response);
+      responseObserver.onNext(((Operation) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -110,10 +112,10 @@ public void instantiateInlineWorkflowTemplate(
     Object response = responses.remove();
     if (response instanceof Operation) {
       requests.add(request);
-      responseObserver.onNext((Operation) response);
+      responseObserver.onNext(((Operation) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -125,10 +127,10 @@ public void updateWorkflowTemplate(
     Object response = responses.remove();
     if (response instanceof WorkflowTemplate) {
       requests.add(request);
-      responseObserver.onNext((WorkflowTemplate) response);
+      responseObserver.onNext(((WorkflowTemplate) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -141,10 +143,10 @@ public void listWorkflowTemplates(
     Object response = responses.remove();
     if (response instanceof ListWorkflowTemplatesResponse) {
       requests.add(request);
-      responseObserver.onNext((ListWorkflowTemplatesResponse) response);
+      responseObserver.onNext(((ListWorkflowTemplatesResponse) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
@@ -156,10 +158,10 @@ public void deleteWorkflowTemplate(
     Object response = responses.remove();
     if (response instanceof Empty) {
       requests.add(request);
-      responseObserver.onNext((Empty) response);
+      responseObserver.onNext(((Empty) response));
       responseObserver.onCompleted();
     } else if (response instanceof Exception) {
-      responseObserver.onError((Exception) response);
+      responseObserver.onError(((Exception) response));
     } else {
       responseObserver.onError(new IllegalArgumentException("Unrecognized response type"));
     }
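// Hedged sketch of the harness wiring the next file's diff applies (names mirror the
// test; not generated code): each regenerated client test now starts an in-process
// server hosting only its own mock service instead of all four Dataproc mocks.
MockWorkflowTemplateService mockService = new MockWorkflowTemplateService();
MockServiceHelper helper =
    new MockServiceHelper(UUID.randomUUID().toString(), Arrays.asList(mockService));
helper.start();
// helper.createChannelProvider() then feeds the client settings, exactly as in setUp().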
diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java
index 86de1d08..051b20e0 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *       https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.cloud.dataproc.v1;

 import static com.google.cloud.dataproc.v1.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse;
@@ -30,15 +31,17 @@
 import com.google.protobuf.AbstractMessage;
 import com.google.protobuf.Any;
 import com.google.protobuf.Empty;
-import io.grpc.Status;
+import com.google.protobuf.Timestamp;
 import io.grpc.StatusRuntimeException;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 import java.util.concurrent.ExecutionException;
+import javax.annotation.Generated;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -46,42 +49,32 @@
 import org.junit.BeforeClass;
 import org.junit.Test;

-@javax.annotation.Generated("by GAPIC")
+@Generated("by gapic-generator-java")
 public class WorkflowTemplateServiceClientTest {
-  private static MockAutoscalingPolicyService mockAutoscalingPolicyService;
-  private static MockClusterController mockClusterController;
-  private static MockJobController mockJobController;
   private static MockWorkflowTemplateService mockWorkflowTemplateService;
-  private static MockServiceHelper serviceHelper;
+  private static MockServiceHelper mockServiceHelper;
   private WorkflowTemplateServiceClient client;
   private LocalChannelProvider channelProvider;

   @BeforeClass
   public static void startStaticServer() {
-    mockAutoscalingPolicyService = new MockAutoscalingPolicyService();
-    mockClusterController = new MockClusterController();
-    mockJobController = new MockJobController();
     mockWorkflowTemplateService = new MockWorkflowTemplateService();
-    serviceHelper =
+    mockServiceHelper =
         new MockServiceHelper(
             UUID.randomUUID().toString(),
-            Arrays.asList(
-                mockAutoscalingPolicyService,
-                mockClusterController,
-                mockJobController,
-                mockWorkflowTemplateService));
-    serviceHelper.start();
+            Arrays.asList(mockWorkflowTemplateService));
+    mockServiceHelper.start();
   }

   @AfterClass
   public static void stopServer() {
-    serviceHelper.stop();
+    mockServiceHelper.stop();
   }

   @Before
   public void setUp() throws IOException {
-    serviceHelper.reset();
-    channelProvider = serviceHelper.createChannelProvider();
+    mockServiceHelper.reset();
+    channelProvider = mockServiceHelper.createChannelProvider();
     WorkflowTemplateServiceSettings settings =
         WorkflowTemplateServiceSettings.newBuilder()
             .setTransportChannelProvider(channelProvider)
@@ -96,7 +89,267 @@ public void tearDown() throws Exception {
   }

   @Test
-  @SuppressWarnings("all")
+  public void createWorkflowTemplateTest() throws Exception {
+    WorkflowTemplate expectedResponse =
+        WorkflowTemplate.newBuilder()
+            .setId("id3355")
+            .setName(
+                WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
+                        "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]")
+                    .toString())
+            .setVersion(351608024)
+            .setCreateTime(Timestamp.newBuilder().build())
+            .setUpdateTime(Timestamp.newBuilder().build())
+            .putAllLabels(new HashMap<String, String>())
+            .setPlacement(WorkflowTemplatePlacement.newBuilder().build())
+            .addAllJobs(new ArrayList<OrderedJob>())
+            .addAllParameters(new ArrayList<TemplateParameter>())
+            .build();
+    mockWorkflowTemplateService.addResponse(expectedResponse);
+
+    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+    WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+
+    WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template);
+    Assert.assertEquals(expectedResponse, actualResponse);
+
+    List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    CreateWorkflowTemplateRequest actualRequest =
+        ((CreateWorkflowTemplateRequest) actualRequests.get(0));
+
+    Assert.assertEquals(parent.toString(), actualRequest.getParent());
+    Assert.assertEquals(template, actualRequest.getTemplate());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void createWorkflowTemplateExceptionTest() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockWorkflowTemplateService.addException(exception);
+
+    try {
+      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+      WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+      client.createWorkflowTemplate(parent, template);
+      Assert.fail("No exception raised");
+    } catch (InvalidArgumentException e) {
+      // Expected exception.
+    }
+  }
+
+  @Test
+  public void createWorkflowTemplateTest2() throws Exception {
+    WorkflowTemplate expectedResponse =
+        WorkflowTemplate.newBuilder()
+            .setId("id3355")
+            .setName(
+                WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
+                        "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]")
+                    .toString())
+            .setVersion(351608024)
+            .setCreateTime(Timestamp.newBuilder().build())
+            .setUpdateTime(Timestamp.newBuilder().build())
+            .putAllLabels(new HashMap<String, String>())
+            .setPlacement(WorkflowTemplatePlacement.newBuilder().build())
+            .addAllJobs(new ArrayList<OrderedJob>())
+            .addAllParameters(new ArrayList<TemplateParameter>())
+            .build();
+    mockWorkflowTemplateService.addResponse(expectedResponse);
+
+    RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+    WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+
+    WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template);
+    Assert.assertEquals(expectedResponse, actualResponse);
+
+    List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    CreateWorkflowTemplateRequest actualRequest =
+        ((CreateWorkflowTemplateRequest) actualRequests.get(0));
+
+    Assert.assertEquals(parent.toString(), actualRequest.getParent());
+    Assert.assertEquals(template, actualRequest.getTemplate());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void createWorkflowTemplateExceptionTest2() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockWorkflowTemplateService.addException(exception);
+
+    try {
+      RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+      WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+      client.createWorkflowTemplate(parent, template);
+      Assert.fail("No exception raised");
+    } catch (InvalidArgumentException e) {
+      // Expected exception.
+    }
+  }
+
+  @Test
+  public void createWorkflowTemplateTest3() throws Exception {
+    WorkflowTemplate expectedResponse =
+        WorkflowTemplate.newBuilder()
+            .setId("id3355")
+            .setName(
+                WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
+                        "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]")
+                    .toString())
+            .setVersion(351608024)
+            .setCreateTime(Timestamp.newBuilder().build())
+            .setUpdateTime(Timestamp.newBuilder().build())
+            .putAllLabels(new HashMap<String, String>())
+            .setPlacement(WorkflowTemplatePlacement.newBuilder().build())
+            .addAllJobs(new ArrayList<OrderedJob>())
+            .addAllParameters(new ArrayList<TemplateParameter>())
+            .build();
+    mockWorkflowTemplateService.addResponse(expectedResponse);
+
+    String parent = "parent-995424086";
+    WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+
+    WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template);
+    Assert.assertEquals(expectedResponse, actualResponse);
+
+    List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    CreateWorkflowTemplateRequest actualRequest =
+        ((CreateWorkflowTemplateRequest) actualRequests.get(0));
+
+    Assert.assertEquals(parent, actualRequest.getParent());
+    Assert.assertEquals(template, actualRequest.getTemplate());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void createWorkflowTemplateExceptionTest3() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockWorkflowTemplateService.addException(exception);
+
+    try {
+      String parent = "parent-995424086";
+      WorkflowTemplate template = WorkflowTemplate.newBuilder().build();
+      client.createWorkflowTemplate(parent, template);
+      Assert.fail("No exception raised");
+    } catch (InvalidArgumentException e) {
+      // Expected exception.
+    }
+  }
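// Illustrative aside (values and the template variable are placeholders; not patch
// content): createWorkflowTemplate now has three tests because the generator emits one
// overload per accepted parent shape, and each must serialize to the same
// CreateWorkflowTemplateRequest.parent string.
client.createWorkflowTemplate(LocationName.of("[PROJECT]", "[LOCATION]"), template);
client.createWorkflowTemplate(RegionName.of("[PROJECT]", "[REGION]"), template);
client.createWorkflowTemplate("projects/p/regions/us-central1", template);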
+
+  @Test
+  public void getWorkflowTemplateTest() throws Exception {
+    WorkflowTemplate expectedResponse =
+        WorkflowTemplate.newBuilder()
+            .setId("id3355")
+            .setName(
+                WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
+                        "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]")
+                    .toString())
+            .setVersion(351608024)
+            .setCreateTime(Timestamp.newBuilder().build())
+            .setUpdateTime(Timestamp.newBuilder().build())
+            .putAllLabels(new HashMap<String, String>())
+            .setPlacement(WorkflowTemplatePlacement.newBuilder().build())
+            .addAllJobs(new ArrayList<OrderedJob>())
+            .addAllParameters(new ArrayList<TemplateParameter>())
+            .build();
+    mockWorkflowTemplateService.addResponse(expectedResponse);
+
+    WorkflowTemplateName name =
+        WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
+            "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+
+    WorkflowTemplate actualResponse = client.getWorkflowTemplate(name);
+    Assert.assertEquals(expectedResponse, actualResponse);
+
+    List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    GetWorkflowTemplateRequest actualRequest = ((GetWorkflowTemplateRequest) actualRequests.get(0));
+
+    Assert.assertEquals(name.toString(), actualRequest.getName());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void getWorkflowTemplateExceptionTest() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockWorkflowTemplateService.addException(exception);
+
+    try {
+      WorkflowTemplateName name =
+          WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
+              "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
+      client.getWorkflowTemplate(name);
+      Assert.fail("No exception raised");
+    } catch (InvalidArgumentException e) {
+      // Expected exception.
+    }
+  }
+
+  @Test
+  public void getWorkflowTemplateTest2() throws Exception {
+    WorkflowTemplate expectedResponse =
+        WorkflowTemplate.newBuilder()
+            .setId("id3355")
+            .setName(
+                WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
+                        "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]")
+                    .toString())
+            .setVersion(351608024)
+            .setCreateTime(Timestamp.newBuilder().build())
+            .setUpdateTime(Timestamp.newBuilder().build())
+            .putAllLabels(new HashMap<String, String>())
+            .setPlacement(WorkflowTemplatePlacement.newBuilder().build())
+            .addAllJobs(new ArrayList<OrderedJob>())
+            .addAllParameters(new ArrayList<TemplateParameter>())
+            .build();
+    mockWorkflowTemplateService.addResponse(expectedResponse);
+
+    String name = "name3373707";
+
+    WorkflowTemplate actualResponse = client.getWorkflowTemplate(name);
+    Assert.assertEquals(expectedResponse, actualResponse);
+
+    List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    GetWorkflowTemplateRequest actualRequest = ((GetWorkflowTemplateRequest) actualRequests.get(0));
+
+    Assert.assertEquals(name, actualRequest.getName());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void getWorkflowTemplateExceptionTest2() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockWorkflowTemplateService.addException(exception);
+
+    try {
+      String name = "name3373707";
+      client.getWorkflowTemplate(name);
+      Assert.fail("No exception raised");
+    } catch (InvalidArgumentException e) {
+      // Expected exception.
+    }
+  }
+
+  @Test
   public void instantiateWorkflowTemplateTest() throws Exception {
     Empty expectedResponse = Empty.newBuilder().build();
     Operation resultOperation =
@@ -111,15 +364,14 @@ public void instantiateWorkflowTemplateTest() throws Exception {
         WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
             "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");

-    Empty actualResponse = client.instantiateWorkflowTemplateAsync(name).get();
-    Assert.assertEquals(expectedResponse, actualResponse);
+    client.instantiateWorkflowTemplateAsync(name).get();

     List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
     Assert.assertEquals(1, actualRequests.size());
     InstantiateWorkflowTemplateRequest actualRequest =
-        (InstantiateWorkflowTemplateRequest) actualRequests.get(0);
+        ((InstantiateWorkflowTemplateRequest) actualRequests.get(0));

-    Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName()));
+    Assert.assertEquals(name.toString(), actualRequest.getName());
     Assert.assertTrue(
         channelProvider.isHeaderSent(
             ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -127,32 +379,72 @@ public void instantiateWorkflowTemplateTest() throws Exception {
   }

   @Test
-  @SuppressWarnings("all")
   public void instantiateWorkflowTemplateExceptionTest() throws Exception {
-    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
     mockWorkflowTemplateService.addException(exception);

     try {
       WorkflowTemplateName name =
           WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
               "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
-
       client.instantiateWorkflowTemplateAsync(name).get();
       Assert.fail("No exception raised");
     } catch (ExecutionException e) {
       Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
-      InvalidArgumentException apiException = (InvalidArgumentException) e.getCause();
+      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
       Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
     }
   }

   @Test
-  @SuppressWarnings("all")
   public void instantiateWorkflowTemplateTest2() throws Exception {
     Empty expectedResponse = Empty.newBuilder().build();
     Operation resultOperation =
         Operation.newBuilder()
-            .setName("instantiateWorkflowTemplateTest2")
+            .setName("instantiateWorkflowTemplateTest")
+            .setDone(true)
+            .setResponse(Any.pack(expectedResponse))
+            .build();
+    mockWorkflowTemplateService.addResponse(resultOperation);
+
+    String name = "name3373707";
+
+    client.instantiateWorkflowTemplateAsync(name).get();
+
+    List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    InstantiateWorkflowTemplateRequest actualRequest =
+        ((InstantiateWorkflowTemplateRequest) actualRequests.get(0));
+
+    Assert.assertEquals(name, actualRequest.getName());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void instantiateWorkflowTemplateExceptionTest2() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockWorkflowTemplateService.addException(exception);
+
+    try {
+      String name = "name3373707";
+      client.instantiateWorkflowTemplateAsync(name).get();
+      Assert.fail("No exception raised");
+    } catch (ExecutionException e) {
+      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
+      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
+      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
+    }
+  }
+
+  @Test
+  public void instantiateWorkflowTemplateTest3() throws Exception {
+    Empty expectedResponse = Empty.newBuilder().build();
+    Operation resultOperation =
+        Operation.newBuilder()
+            .setName("instantiateWorkflowTemplateTest")
             .setDone(true)
             .setResponse(Any.pack(expectedResponse))
             .build();
@@ -163,15 +455,14 @@ public void instantiateWorkflowTemplateTest2() throws Exception {
             "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
     Map<String, String> parameters = new HashMap<>();

-    Empty actualResponse = client.instantiateWorkflowTemplateAsync(name, parameters).get();
-    Assert.assertEquals(expectedResponse, actualResponse);
+    client.instantiateWorkflowTemplateAsync(name, parameters).get();

     List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
     Assert.assertEquals(1, actualRequests.size());
     InstantiateWorkflowTemplateRequest actualRequest =
-        (InstantiateWorkflowTemplateRequest) actualRequests.get(0);
+        ((InstantiateWorkflowTemplateRequest) actualRequests.get(0));

-    Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName()));
+    Assert.assertEquals(name.toString(), actualRequest.getName());
     Assert.assertEquals(parameters, actualRequest.getParametersMap());
     Assert.assertTrue(
         channelProvider.isHeaderSent(
@@ -180,9 +471,8 @@ public void instantiateWorkflowTemplateTest2() {
   }

   @Test
-  @SuppressWarnings("all")
-  public void instantiateWorkflowTemplateExceptionTest2() throws Exception {
-    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
+  public void instantiateWorkflowTemplateExceptionTest3() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
     mockWorkflowTemplateService.addException(exception);

     try {
@@ -190,18 +480,62 @@ public void instantiateWorkflowTemplateExceptionTest2() throws Exception {
         WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
             "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]");
     Map<String, String> parameters = new HashMap<>();
+      client.instantiateWorkflowTemplateAsync(name, parameters).get();
+      Assert.fail("No exception raised");
+    } catch (ExecutionException e) {
+      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
+      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
+      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
+    }
+  }
+
+  @Test
+  public void instantiateWorkflowTemplateTest4() throws Exception {
+    Empty expectedResponse = Empty.newBuilder().build();
+    Operation resultOperation =
+        Operation.newBuilder()
+            .setName("instantiateWorkflowTemplateTest")
+            .setDone(true)
+            .setResponse(Any.pack(expectedResponse))
+            .build();
+    mockWorkflowTemplateService.addResponse(resultOperation);
+
+    String name = "name3373707";
+    Map<String, String> parameters = new HashMap<>();
+
+    client.instantiateWorkflowTemplateAsync(name, parameters).get();
+
+    List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    InstantiateWorkflowTemplateRequest actualRequest =
+        ((InstantiateWorkflowTemplateRequest) actualRequests.get(0));
+
+    Assert.assertEquals(name, actualRequest.getName());
+    Assert.assertEquals(parameters, actualRequest.getParametersMap());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void instantiateWorkflowTemplateExceptionTest4() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockWorkflowTemplateService.addException(exception);
+
+    try {
+      String name = "name3373707";
+      Map<String, String> parameters = new HashMap<>();
       client.instantiateWorkflowTemplateAsync(name, parameters).get();
       Assert.fail("No exception raised");
     } catch (ExecutionException e) {
       Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
-      InvalidArgumentException apiException = (InvalidArgumentException) e.getCause();
+      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
       Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
     }
   }
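// Sketch (under stated assumptions; not patch content) of the long-running-operation
// fake the instantiate tests rely on: a pre-completed Operation wrapping Any.pack(Empty)
// lets instantiateWorkflowTemplateAsync(...).get() return without any polling round-trips.
Operation preCompleted =
    Operation.newBuilder()
        .setName("fake-operation") // hypothetical name; the tests reuse the test method name
        .setDone(true)             // marked done up front, so no GetOperation calls occur
        .setResponse(Any.pack(Empty.getDefaultInstance()))
        .build();
mockWorkflowTemplateService.addResponse(preCompleted);
client.instantiateWorkflowTemplateAsync("name3373707").get(); // completes immediately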
(InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0); + ((InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -232,52 +565,44 @@ public void instantiateInlineWorkflowTemplateTest() throws Exception { } @Test - @SuppressWarnings("all") public void instantiateInlineWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void createWorkflowTemplateTest() { - String id = "id3355"; - WorkflowTemplateName name = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - int version = 351608024; - WorkflowTemplate expectedResponse = - WorkflowTemplate.newBuilder() - .setId(id) - .setName(name.toString()) - .setVersion(version) + public void instantiateInlineWorkflowTemplateTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateInlineWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) .build(); - mockWorkflowTemplateService.addResponse(expectedResponse); + mockWorkflowTemplateService.addResponse(resultOperation); RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template); - Assert.assertEquals(expectedResponse, actualResponse); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateWorkflowTemplateRequest actualRequest = - (CreateWorkflowTemplateRequest) actualRequests.get(0); + InstantiateInlineWorkflowTemplateRequest actualRequest = + ((InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -286,50 +611,45 @@ public void createWorkflowTemplateTest() { } @Test - @SuppressWarnings("all") - public void createWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new 
StatusRuntimeException(Status.INVALID_ARGUMENT); + public void instantiateInlineWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - - client.createWorkflowTemplate(parent, template); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void getWorkflowTemplateTest() { - String id = "id3355"; - WorkflowTemplateName name2 = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - int version = 351608024; - WorkflowTemplate expectedResponse = - WorkflowTemplate.newBuilder() - .setId(id) - .setName(name2.toString()) - .setVersion(version) + public void instantiateInlineWorkflowTemplateTest3() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateInlineWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) .build(); - mockWorkflowTemplateService.addResponse(expectedResponse); + mockWorkflowTemplateService.addResponse(resultOperation); - WorkflowTemplateName name = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + String parent = "parent-995424086"; + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - WorkflowTemplate actualResponse = client.getWorkflowTemplate(name); - Assert.assertEquals(expectedResponse, actualResponse); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetWorkflowTemplateRequest actualRequest = (GetWorkflowTemplateRequest) actualRequests.get(0); + InstantiateInlineWorkflowTemplateRequest actualRequest = + ((InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -337,36 +657,38 @@ public void getWorkflowTemplateTest() { } @Test - @SuppressWarnings("all") - public void getWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void instantiateInlineWorkflowTemplateExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { - WorkflowTemplateName name = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - - client.getWorkflowTemplate(name); + String 
parent = "parent-995424086"; + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void updateWorkflowTemplateTest() { - String id = "id3355"; - WorkflowTemplateName name = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - int version = 351608024; + public void updateWorkflowTemplateTest() throws Exception { WorkflowTemplate expectedResponse = WorkflowTemplate.newBuilder() - .setId(id) - .setName(name.toString()) - .setVersion(version) + .setId("id3355") + .setName( + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]") + .toString()) + .setVersion(351608024) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .putAllLabels(new HashMap()) + .setPlacement(WorkflowTemplatePlacement.newBuilder().build()) + .addAllJobs(new ArrayList()) + .addAllParameters(new ArrayList()) .build(); mockWorkflowTemplateService.addResponse(expectedResponse); @@ -378,7 +700,7 @@ public void updateWorkflowTemplateTest() { List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateWorkflowTemplateRequest actualRequest = - (UpdateWorkflowTemplateRequest) actualRequests.get(0); + ((UpdateWorkflowTemplateRequest) actualRequests.get(0)); Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( @@ -388,48 +710,44 @@ public void updateWorkflowTemplateTest() { } @Test - @SuppressWarnings("all") public void updateWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - client.updateWorkflowTemplate(template); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
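// A minimal caller-side sketch of the long-running instantiateInline pattern
// exercised earlier in this file. Names and values here are hypothetical; this
// assumes default credentials and a real Dataproc backend rather than the
// in-process mock used by these tests.
try (WorkflowTemplateServiceClient workflowClient = WorkflowTemplateServiceClient.create()) {
  RegionName target = RegionName.of("my-project", "us-central1");
  WorkflowTemplate inlineTemplate = WorkflowTemplate.newBuilder().build();
  // get() blocks until the returned Operation completes; a server-side failure
  // surfaces as the cause of an ExecutionException, which is what the tests assert.
  workflowClient.instantiateInlineWorkflowTemplateAsync(target, inlineTemplate).get();
}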
     }
   }
 
   @Test
-  @SuppressWarnings("all")
-  public void listWorkflowTemplatesTest() {
-    String nextPageToken = "";
-    WorkflowTemplate templatesElement = WorkflowTemplate.newBuilder().build();
-    List<WorkflowTemplate> templates = Arrays.asList(templatesElement);
+  public void listWorkflowTemplatesTest() throws Exception {
+    WorkflowTemplate responsesElement = WorkflowTemplate.newBuilder().build();
     ListWorkflowTemplatesResponse expectedResponse =
         ListWorkflowTemplatesResponse.newBuilder()
-            .setNextPageToken(nextPageToken)
-            .addAllTemplates(templates)
+            .setNextPageToken("")
+            .addAllTemplates(Arrays.asList(responsesElement))
             .build();
     mockWorkflowTemplateService.addResponse(expectedResponse);
 
-    RegionName parent = RegionName.of("[PROJECT]", "[REGION]");
+    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
 
     ListWorkflowTemplatesPagedResponse pagedListResponse = client.listWorkflowTemplates(parent);
 
     List<WorkflowTemplate> resources = Lists.newArrayList(pagedListResponse.iterateAll());
+    Assert.assertEquals(1, resources.size());
     Assert.assertEquals(expectedResponse.getTemplatesList().get(0), resources.get(0));
 
     List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
     Assert.assertEquals(1, actualRequests.size());
     ListWorkflowTemplatesRequest actualRequest =
-        (ListWorkflowTemplatesRequest) actualRequests.get(0);
+        ((ListWorkflowTemplatesRequest) actualRequests.get(0));
 
-    Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent()));
+    Assert.assertEquals(parent.toString(), actualRequest.getParent());
     Assert.assertTrue(
         channelProvider.isHeaderSent(
             ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -437,24 +755,111 @@ public void listWorkflowTemplatesTest() {
   }
 
   @Test
-  @SuppressWarnings("all")
   public void listWorkflowTemplatesExceptionTest() throws Exception {
-    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockWorkflowTemplateService.addException(exception);
+
+    try {
+      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
+      client.listWorkflowTemplates(parent);
+      Assert.fail("No exception raised");
+    } catch (InvalidArgumentException e) {
+      // Expected exception.
+ } + } + + @Test + public void listWorkflowTemplatesTest2() throws Exception { + WorkflowTemplate responsesElement = WorkflowTemplate.newBuilder().build(); + ListWorkflowTemplatesResponse expectedResponse = + ListWorkflowTemplatesResponse.newBuilder() + .setNextPageToken("") + .addAllTemplates(Arrays.asList(responsesElement)) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + + ListWorkflowTemplatesPagedResponse pagedListResponse = client.listWorkflowTemplates(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTemplatesList().get(0), resources.get(0)); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListWorkflowTemplatesRequest actualRequest = + ((ListWorkflowTemplatesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listWorkflowTemplatesExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + client.listWorkflowTemplates(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + @Test + public void listWorkflowTemplatesTest3() throws Exception { + WorkflowTemplate responsesElement = WorkflowTemplate.newBuilder().build(); + ListWorkflowTemplatesResponse expectedResponse = + ListWorkflowTemplatesResponse.newBuilder() + .setNextPageToken("") + .addAllTemplates(Arrays.asList(responsesElement)) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListWorkflowTemplatesPagedResponse pagedListResponse = client.listWorkflowTemplates(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTemplatesList().get(0), resources.get(0)); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListWorkflowTemplatesRequest actualRequest = + ((ListWorkflowTemplatesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listWorkflowTemplatesExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String parent = "parent-995424086"; client.listWorkflowTemplates(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
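// A paging sketch under the same assumptions (hypothetical parent value):
// iterateAll() on the paged response fetches page after page transparently,
// which is what Lists.newArrayList(...) flattens in the list tests above.
try (WorkflowTemplateServiceClient workflowClient = WorkflowTemplateServiceClient.create()) {
  LocationName target = LocationName.of("my-project", "us-central1");
  for (WorkflowTemplate template : workflowClient.listWorkflowTemplates(target).iterateAll()) {
    System.out.println(template.getName());
  }
}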
} } @Test - @SuppressWarnings("all") - public void deleteWorkflowTemplateTest() { + public void deleteWorkflowTemplateTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockWorkflowTemplateService.addResponse(expectedResponse); @@ -467,9 +872,9 @@ public void deleteWorkflowTemplateTest() { List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteWorkflowTemplateRequest actualRequest = - (DeleteWorkflowTemplateRequest) actualRequests.get(0); + ((DeleteWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -477,20 +882,53 @@ public void deleteWorkflowTemplateTest() { } @Test - @SuppressWarnings("all") public void deleteWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + client.deleteWorkflowTemplate(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + @Test + public void deleteWorkflowTemplateTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String name = "name3373707"; + + client.deleteWorkflowTemplate(name); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteWorkflowTemplateRequest actualRequest = + ((DeleteWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String name = "name3373707"; client.deleteWorkflowTemplate(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClientTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClientTest.java index 7b0b70b8..bdfab55a 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClientTest.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import static com.google.cloud.dataproc.v1beta2.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse; @@ -27,12 +28,12 @@ import com.google.common.collect.Lists; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; -import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -40,42 +41,32 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class AutoscalingPolicyServiceClientTest { private static MockAutoscalingPolicyService mockAutoscalingPolicyService; - private static MockClusterController mockClusterController; - private static MockJobController mockJobController; - private static MockWorkflowTemplateService mockWorkflowTemplateService; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private AutoscalingPolicyServiceClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockAutoscalingPolicyService = new MockAutoscalingPolicyService(); - mockClusterController = new MockClusterController(); - mockJobController = new MockJobController(); - mockWorkflowTemplateService = new MockWorkflowTemplateService(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), - Arrays.asList( - mockAutoscalingPolicyService, - mockClusterController, - mockJobController, - mockWorkflowTemplateService)); - serviceHelper.start(); + Arrays.asList(mockAutoscalingPolicyService)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); AutoscalingPolicyServiceSettings settings = AutoscalingPolicyServiceSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -90,17 +81,20 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createAutoscalingPolicyTest() { - String id = "id3355"; - AutoscalingPolicyName name = - AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( - "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + public void createAutoscalingPolicyTest() throws Exception { AutoscalingPolicy expectedResponse = - AutoscalingPolicy.newBuilder().setId(id).setName(name.toString()).build(); + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + 
.setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); mockAutoscalingPolicyService.addResponse(expectedResponse); - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); AutoscalingPolicy actualResponse = client.createAutoscalingPolicy(parent, policy); @@ -109,9 +103,9 @@ public void createAutoscalingPolicyTest() { List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); CreateAutoscalingPolicyRequest actualRequest = - (CreateAutoscalingPolicyRequest) actualRequests.get(0); + ((CreateAutoscalingPolicyRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(policy, actualRequest.getPolicy()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -120,31 +114,128 @@ public void createAutoscalingPolicyTest() { } @Test - @SuppressWarnings("all") public void createAutoscalingPolicyExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); + client.createAutoscalingPolicy(parent, policy); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createAutoscalingPolicyTest2() throws Exception { + AutoscalingPolicy expectedResponse = + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); + + AutoscalingPolicy actualResponse = client.createAutoscalingPolicy(parent, policy); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateAutoscalingPolicyRequest actualRequest = + ((CreateAutoscalingPolicyRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(policy, actualRequest.getPolicy()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createAutoscalingPolicyExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); + client.createAutoscalingPolicy(parent, policy); + Assert.fail("No exception 
raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createAutoscalingPolicyTest3() throws Exception { + AutoscalingPolicy expectedResponse = + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); + AutoscalingPolicy actualResponse = client.createAutoscalingPolicy(parent, policy); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateAutoscalingPolicyRequest actualRequest = + ((CreateAutoscalingPolicyRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(policy, actualRequest.getPolicy()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createAutoscalingPolicyExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + String parent = "parent-995424086"; + AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); client.createAutoscalingPolicy(parent, policy); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void updateAutoscalingPolicyTest() { - String id = "id3355"; - AutoscalingPolicyName name = - AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( - "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + public void updateAutoscalingPolicyTest() throws Exception { AutoscalingPolicy expectedResponse = - AutoscalingPolicy.newBuilder().setId(id).setName(name.toString()).build(); + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); mockAutoscalingPolicyService.addResponse(expectedResponse); AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); @@ -155,7 +246,7 @@ public void updateAutoscalingPolicyTest() { List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateAutoscalingPolicyRequest actualRequest = - (UpdateAutoscalingPolicyRequest) actualRequests.get(0); + ((UpdateAutoscalingPolicyRequest) actualRequests.get(0)); Assert.assertEquals(policy, actualRequest.getPolicy()); Assert.assertTrue( @@ -165,30 +256,31 @@ public void updateAutoscalingPolicyTest() { } @Test - @SuppressWarnings("all") public void updateAutoscalingPolicyExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { AutoscalingPolicy policy = AutoscalingPolicy.newBuilder().build(); - client.updateAutoscalingPolicy(policy); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
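// The createAutoscalingPolicy variants above differ only in the parent type; each
// overload formats the same kind of resource string into the request. A sketch with
// assumed values, showing what the tests' parent.toString() assertions compare:
LocationName locationParent = LocationName.of("my-project", "us-central1");
RegionName regionParent = RegionName.of("my-project", "us-central1");
// locationParent.toString() -> "projects/my-project/locations/us-central1"
// regionParent.toString()   -> "projects/my-project/regions/us-central1"
String stringParent = locationParent.toString(); // the plain-String overload takes this form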
} } @Test - @SuppressWarnings("all") - public void getAutoscalingPolicyTest() { - String id = "id3355"; - AutoscalingPolicyName name2 = - AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( - "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + public void getAutoscalingPolicyTest() throws Exception { AutoscalingPolicy expectedResponse = - AutoscalingPolicy.newBuilder().setId(id).setName(name2.toString()).build(); + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); mockAutoscalingPolicyService.addResponse(expectedResponse); AutoscalingPolicyName name = @@ -200,9 +292,10 @@ public void getAutoscalingPolicyTest() { List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetAutoscalingPolicyRequest actualRequest = (GetAutoscalingPolicyRequest) actualRequests.get(0); + GetAutoscalingPolicyRequest actualRequest = + ((GetAutoscalingPolicyRequest) actualRequests.get(0)); - Assert.assertEquals(name, AutoscalingPolicyName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -210,50 +303,91 @@ public void getAutoscalingPolicyTest() { } @Test - @SuppressWarnings("all") public void getAutoscalingPolicyExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + client.getAutoscalingPolicy(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getAutoscalingPolicyTest2() throws Exception { + AutoscalingPolicy expectedResponse = + AutoscalingPolicy.newBuilder() + .setId("id3355") + .setName( + AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( + "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]") + .toString()) + .setWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .setSecondaryWorkerConfig(InstanceGroupAutoscalingPolicyConfig.newBuilder().build()) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + String name = "name3373707"; + + AutoscalingPolicy actualResponse = client.getAutoscalingPolicy(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetAutoscalingPolicyRequest actualRequest = + ((GetAutoscalingPolicyRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + @Test + public void getAutoscalingPolicyExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + String name = "name3373707"; client.getAutoscalingPolicy(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void listAutoscalingPoliciesTest() { - String nextPageToken = ""; - AutoscalingPolicy policiesElement = AutoscalingPolicy.newBuilder().build(); - List policies = Arrays.asList(policiesElement); + public void listAutoscalingPoliciesTest() throws Exception { + AutoscalingPolicy responsesElement = AutoscalingPolicy.newBuilder().build(); ListAutoscalingPoliciesResponse expectedResponse = ListAutoscalingPoliciesResponse.newBuilder() - .setNextPageToken(nextPageToken) - .addAllPolicies(policies) + .setNextPageToken("") + .addAllPolicies(Arrays.asList(responsesElement)) .build(); mockAutoscalingPolicyService.addResponse(expectedResponse); - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListAutoscalingPoliciesPagedResponse pagedListResponse = client.listAutoscalingPolicies(parent); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getPoliciesList().get(0), resources.get(0)); List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListAutoscalingPoliciesRequest actualRequest = - (ListAutoscalingPoliciesRequest) actualRequests.get(0); + ((ListAutoscalingPoliciesRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -261,24 +395,111 @@ public void listAutoscalingPoliciesTest() { } @Test - @SuppressWarnings("all") public void listAutoscalingPoliciesExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + client.listAutoscalingPolicies(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void listAutoscalingPoliciesTest2() throws Exception { + AutoscalingPolicy responsesElement = AutoscalingPolicy.newBuilder().build(); + ListAutoscalingPoliciesResponse expectedResponse = + ListAutoscalingPoliciesResponse.newBuilder() + .setNextPageToken("") + .addAllPolicies(Arrays.asList(responsesElement)) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + + ListAutoscalingPoliciesPagedResponse pagedListResponse = client.listAutoscalingPolicies(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getPoliciesList().get(0), resources.get(0)); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListAutoscalingPoliciesRequest actualRequest = + ((ListAutoscalingPoliciesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listAutoscalingPoliciesExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + client.listAutoscalingPolicies(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listAutoscalingPoliciesTest3() throws Exception { + AutoscalingPolicy responsesElement = AutoscalingPolicy.newBuilder().build(); + ListAutoscalingPoliciesResponse expectedResponse = + ListAutoscalingPoliciesResponse.newBuilder() + .setNextPageToken("") + .addAllPolicies(Arrays.asList(responsesElement)) + .build(); + mockAutoscalingPolicyService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListAutoscalingPoliciesPagedResponse pagedListResponse = client.listAutoscalingPolicies(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getPoliciesList().get(0), resources.get(0)); + + List actualRequests = mockAutoscalingPolicyService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListAutoscalingPoliciesRequest actualRequest = + ((ListAutoscalingPoliciesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listAutoscalingPoliciesExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockAutoscalingPolicyService.addException(exception); + + try { + String parent = "parent-995424086"; client.listAutoscalingPolicies(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void deleteAutoscalingPolicyTest() { + public void deleteAutoscalingPolicyTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockAutoscalingPolicyService.addResponse(expectedResponse); @@ -291,9 +512,9 @@ public void deleteAutoscalingPolicyTest() { List actualRequests = mockAutoscalingPolicyService.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteAutoscalingPolicyRequest actualRequest = - (DeleteAutoscalingPolicyRequest) actualRequests.get(0); + ((DeleteAutoscalingPolicyRequest) actualRequests.get(0)); - Assert.assertEquals(name, AutoscalingPolicyName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -301,20 +522,53 @@ public void deleteAutoscalingPolicyTest() { } @Test - @SuppressWarnings("all") public void deleteAutoscalingPolicyExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockAutoscalingPolicyService.addException(exception); try { AutoscalingPolicyName name = AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName( "[PROJECT]", "[LOCATION]", "[AUTOSCALING_POLICY]"); + client.deleteAutoscalingPolicy(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
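// The name.toString() assertions above rely on the typed-name round trip.
// A sketch with hypothetical values:
AutoscalingPolicyName policyName =
    AutoscalingPolicyName.ofProjectLocationAutoscalingPolicyName(
        "my-project", "us-central1", "my-policy");
String formatted = policyName.toString();
// formatted -> "projects/my-project/locations/us-central1/autoscalingPolicies/my-policy"
AutoscalingPolicyName parsed = AutoscalingPolicyName.parse(formatted);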
+    }
+  }
+
+  @Test
+  public void deleteAutoscalingPolicyTest2() throws Exception {
+    Empty expectedResponse = Empty.newBuilder().build();
+    mockAutoscalingPolicyService.addResponse(expectedResponse);
+
+    String name = "name3373707";
+
+    client.deleteAutoscalingPolicy(name);
+
+    List<AbstractMessage> actualRequests = mockAutoscalingPolicyService.getRequests();
+    Assert.assertEquals(1, actualRequests.size());
+    DeleteAutoscalingPolicyRequest actualRequest =
+        ((DeleteAutoscalingPolicyRequest) actualRequests.get(0));
+
+    Assert.assertEquals(name, actualRequest.getName());
+    Assert.assertTrue(
+        channelProvider.isHeaderSent(
+            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
+            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
+  }
+
+  @Test
+  public void deleteAutoscalingPolicyExceptionTest2() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
+    mockAutoscalingPolicyService.addException(exception);
+
+    try {
+      String name = "name3373707";
       client.deleteAutoscalingPolicy(name);
       Assert.fail("No exception raised");
     } catch (InvalidArgumentException e) {
-      // Expected exception
+      // Expected exception.
     }
   }
 }
diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java
index df7d1e24..95520651 100644
--- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java
+++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * https://www.apache.org/licenses/LICENSE-2.0
+ * https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
*/ + package com.google.cloud.dataproc.v1beta2; import static com.google.cloud.dataproc.v1beta2.ClusterControllerClient.ListClustersPagedResponse; @@ -31,13 +32,15 @@ import com.google.protobuf.Any; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; -import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -45,42 +48,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class ClusterControllerClientTest { - private static MockAutoscalingPolicyService mockAutoscalingPolicyService; private static MockClusterController mockClusterController; - private static MockJobController mockJobController; - private static MockWorkflowTemplateService mockWorkflowTemplateService; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private ClusterControllerClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { - mockAutoscalingPolicyService = new MockAutoscalingPolicyService(); mockClusterController = new MockClusterController(); - mockJobController = new MockJobController(); - mockWorkflowTemplateService = new MockWorkflowTemplateService(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), - Arrays.asList( - mockAutoscalingPolicyService, - mockClusterController, - mockJobController, - mockWorkflowTemplateService)); - serviceHelper.start(); + UUID.randomUUID().toString(), Arrays.asList(mockClusterController)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); ClusterControllerSettings settings = ClusterControllerSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -95,16 +87,17 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") public void createClusterTest() throws Exception { - String projectId2 = "projectId2939242356"; - String clusterName = "clusterName-1018081872"; - String clusterUuid = "clusterUuid-1017854240"; Cluster expectedResponse = Cluster.newBuilder() - .setProjectId(projectId2) - .setClusterName(clusterName) - .setClusterUuid(clusterUuid) + .setProjectId("projectId-894832108") + .setClusterName("clusterName-1141738587") + .setConfig(ClusterConfig.newBuilder().build()) + .putAllLabels(new HashMap()) + .setStatus(ClusterStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .setClusterUuid("clusterUuid-1141510955") + .setMetrics(ClusterMetrics.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() @@ -114,7 +107,7 @@ public void createClusterTest() throws Exception { .build(); mockClusterController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Cluster cluster = Cluster.newBuilder().build(); @@ -123,7 +116,7 @@ 
public void createClusterTest() throws Exception { List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateClusterRequest actualRequest = (CreateClusterRequest) actualRequests.get(0); + CreateClusterRequest actualRequest = ((CreateClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -135,36 +128,35 @@ public void createClusterTest() throws Exception { } @Test - @SuppressWarnings("all") public void createClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Cluster cluster = Cluster.newBuilder().build(); - client.createClusterAsync(projectId, region, cluster).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") public void updateClusterTest() throws Exception { - String projectId2 = "projectId2939242356"; - String clusterName2 = "clusterName2875867491"; - String clusterUuid = "clusterUuid-1017854240"; Cluster expectedResponse = Cluster.newBuilder() - .setProjectId(projectId2) - .setClusterName(clusterName2) - .setClusterUuid(clusterUuid) + .setProjectId("projectId-894832108") + .setClusterName("clusterName-1141738587") + .setConfig(ClusterConfig.newBuilder().build()) + .putAllLabels(new HashMap()) + .setStatus(ClusterStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .setClusterUuid("clusterUuid-1141510955") + .setMetrics(ClusterMetrics.newBuilder().build()) .build(); Operation resultOperation = Operation.newBuilder() @@ -174,9 +166,9 @@ public void updateClusterTest() throws Exception { .build(); mockClusterController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; + String clusterName = "clusterName-1141738587"; Cluster cluster = Cluster.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); @@ -186,7 +178,7 @@ public void updateClusterTest() throws Exception { List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - UpdateClusterRequest actualRequest = (UpdateClusterRequest) actualRequests.get(0); + UpdateClusterRequest actualRequest = ((UpdateClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -200,29 +192,26 @@ public void updateClusterTest() throws Exception { } @Test - @SuppressWarnings("all") public void updateClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; + String clusterName = "clusterName-1141738587"; Cluster cluster = Cluster.newBuilder().build(); FieldMask updateMask = FieldMask.newBuilder().build(); - client.updateClusterAsync(projectId, region, clusterName, cluster, updateMask).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") public void deleteClusterTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = @@ -233,16 +222,15 @@ public void deleteClusterTest() throws Exception { .build(); mockClusterController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; + String clusterName = "clusterName-1141738587"; - Empty actualResponse = client.deleteClusterAsync(projectId, region, clusterName).get(); - Assert.assertEquals(expectedResponse, actualResponse); + client.deleteClusterAsync(projectId, region, clusterName).get(); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - DeleteClusterRequest actualRequest = (DeleteClusterRequest) actualRequests.get(0); + DeleteClusterRequest actualRequest = ((DeleteClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -254,47 +242,48 @@ public void deleteClusterTest() throws Exception { } @Test - @SuppressWarnings("all") public void deleteClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; - + String clusterName = "clusterName-1141738587"; client.deleteClusterAsync(projectId, region, clusterName).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void diagnoseClusterTest() throws Exception { - Empty expectedResponse = Empty.newBuilder().build(); - Operation resultOperation = - Operation.newBuilder() - .setName("diagnoseClusterTest") - .setDone(true) - .setResponse(Any.pack(expectedResponse)) + public void getClusterTest() throws Exception { + Cluster expectedResponse = 
+ Cluster.newBuilder() + .setProjectId("projectId-894832108") + .setClusterName("clusterName-1141738587") + .setConfig(ClusterConfig.newBuilder().build()) + .putAllLabels(new HashMap()) + .setStatus(ClusterStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .setClusterUuid("clusterUuid-1141510955") + .setMetrics(ClusterMetrics.newBuilder().build()) .build(); - mockClusterController.addResponse(resultOperation); + mockClusterController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; + String clusterName = "clusterName-1141738587"; - Empty actualResponse = client.diagnoseClusterAsync(projectId, region, clusterName).get(); + Cluster actualResponse = client.getCluster(projectId, region, clusterName); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - DiagnoseClusterRequest actualRequest = (DiagnoseClusterRequest) actualRequests.get(0); + GetClusterRequest actualRequest = ((GetClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -306,53 +295,47 @@ public void diagnoseClusterTest() throws Exception { } @Test - @SuppressWarnings("all") - public void diagnoseClusterExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void getClusterExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String clusterName = "clusterName-1018081872"; - - client.diagnoseClusterAsync(projectId, region, clusterName).get(); + String clusterName = "clusterName-1141738587"; + client.getCluster(projectId, region, clusterName); Assert.fail("No exception raised"); - } catch (ExecutionException e) { - Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); - Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } catch (InvalidArgumentException e) { + // Expected exception. 
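// For reference, the in-process harness these tests build in setUp(), condensed into
// one sketch: MockServiceHelper hosts the mock service on an in-process gRPC channel,
// so the client under test needs no network and no credentials.
MockClusterController mock = new MockClusterController();
MockServiceHelper helper =
    new MockServiceHelper(UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mock));
helper.start();
ClusterControllerSettings localSettings =
    ClusterControllerSettings.newBuilder()
        .setTransportChannelProvider(helper.createChannelProvider())
        .setCredentialsProvider(NoCredentialsProvider.create())
        .build();
ClusterControllerClient localClient = ClusterControllerClient.create(localSettings);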
     }
   }
 
   @Test
-  @SuppressWarnings("all")
-  public void getClusterTest() {
-    String projectId2 = "projectId2939242356";
-    String clusterName2 = "clusterName2875867491";
-    String clusterUuid = "clusterUuid-1017854240";
-    Cluster expectedResponse =
-        Cluster.newBuilder()
-            .setProjectId(projectId2)
-            .setClusterName(clusterName2)
-            .setClusterUuid(clusterUuid)
+  public void listClustersTest() throws Exception {
+    Cluster responsesElement = Cluster.newBuilder().build();
+    ListClustersResponse expectedResponse =
+        ListClustersResponse.newBuilder()
+            .setNextPageToken("")
+            .addAllClusters(Arrays.asList(responsesElement))
             .build();
     mockClusterController.addResponse(expectedResponse);
 
-    String projectId = "projectId-1969970175";
+    String projectId = "projectId-894832108";
     String region = "region-934795532";
-    String clusterName = "clusterName-1018081872";
-    Cluster actualResponse = client.getCluster(projectId, region, clusterName);
-    Assert.assertEquals(expectedResponse, actualResponse);
+
+    ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region);
+
+    List<Cluster> resources = Lists.newArrayList(pagedListResponse.iterateAll());
+
+    Assert.assertEquals(1, resources.size());
+    Assert.assertEquals(expectedResponse.getClustersList().get(0), resources.get(0));
 
     List<AbstractMessage> actualRequests = mockClusterController.getRequests();
     Assert.assertEquals(1, actualRequests.size());
-    GetClusterRequest actualRequest = (GetClusterRequest) actualRequests.get(0);
+    ListClustersRequest actualRequest = ((ListClustersRequest) actualRequests.get(0));
 
     Assert.assertEquals(projectId, actualRequest.getProjectId());
     Assert.assertEquals(region, actualRequest.getRegion());
-    Assert.assertEquals(clusterName, actualRequest.getClusterName());
     Assert.assertTrue(
         channelProvider.isHeaderSent(
             ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -360,51 +343,48 @@ public void getClusterTest() {
   }
 
   @Test
-  @SuppressWarnings("all")
-  public void getClusterExceptionTest() throws Exception {
-    StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
+  public void listClustersExceptionTest() throws Exception {
+    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClusterController.addException(exception);
 
     try {
-      String projectId = "projectId-1969970175";
+      String projectId = "projectId-894832108";
       String region = "region-934795532";
-      String clusterName = "clusterName-1018081872";
-
-      client.getCluster(projectId, region, clusterName);
+      client.listClusters(projectId, region);
       Assert.fail("No exception raised");
     } catch (InvalidArgumentException e) {
-      // Expected exception
+      // Expected exception.
} } @Test - @SuppressWarnings("all") - public void listClustersTest() { - String nextPageToken = ""; - Cluster clustersElement = Cluster.newBuilder().build(); - List clusters = Arrays.asList(clustersElement); + public void listClustersTest2() throws Exception { + Cluster responsesElement = Cluster.newBuilder().build(); ListClustersResponse expectedResponse = ListClustersResponse.newBuilder() - .setNextPageToken(nextPageToken) - .addAllClusters(clusters) + .setNextPageToken("") + .addAllClusters(Arrays.asList(responsesElement)) .build(); mockClusterController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; + String filter = "filter-1274492040"; - ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region); + ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region, filter); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getClustersList().get(0), resources.get(0)); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - ListClustersRequest actualRequest = (ListClustersRequest) actualRequests.get(0); + ListClustersRequest actualRequest = ((ListClustersRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(filter, actualRequest.getFilter()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -412,50 +392,45 @@ public void listClustersTest() { } @Test - @SuppressWarnings("all") - public void listClustersExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void listClustersExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - - client.listClusters(projectId, region); + String filter = "filter-1274492040"; + client.listClusters(projectId, region, filter); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
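// listClustersTest2 above exercises the filter overload; the client forwards the
// expression verbatim and the server applies it. A sketch with an assumed filter
// expression and hypothetical project/region values:
for (Cluster cluster :
    client.listClusters("my-project", "us-central1", "status.state = ACTIVE").iterateAll()) {
  System.out.println(cluster.getClusterName());
}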
} } @Test - @SuppressWarnings("all") - public void listClustersTest2() { - String nextPageToken = ""; - Cluster clustersElement = Cluster.newBuilder().build(); - List clusters = Arrays.asList(clustersElement); - ListClustersResponse expectedResponse = - ListClustersResponse.newBuilder() - .setNextPageToken(nextPageToken) - .addAllClusters(clusters) + public void diagnoseClusterTest() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("diagnoseClusterTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) .build(); - mockClusterController.addResponse(expectedResponse); + mockClusterController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; + String clusterName = "clusterName-1141738587"; - ListClustersPagedResponse pagedListResponse = client.listClusters(projectId, region); - - List resources = Lists.newArrayList(pagedListResponse.iterateAll()); - Assert.assertEquals(1, resources.size()); - Assert.assertEquals(expectedResponse.getClustersList().get(0), resources.get(0)); + client.diagnoseClusterAsync(projectId, region, clusterName).get(); List actualRequests = mockClusterController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - ListClustersRequest actualRequest = (ListClustersRequest) actualRequests.get(0); + DiagnoseClusterRequest actualRequest = ((DiagnoseClusterRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(clusterName, actualRequest.getClusterName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -463,19 +438,20 @@ public void listClustersTest2() { } @Test - @SuppressWarnings("all") - public void listClustersExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void diagnoseClusterExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockClusterController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - - client.listClusters(projectId, region); + String clusterName = "clusterName-1141738587"; + client.diagnoseClusterAsync(projectId, region, clusterName).get(); Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSmokeTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSmokeTest.java deleted file mode 100644 index 23168f9c..00000000 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSmokeTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the 
License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.cloud.dataproc.v1beta2; - -import static com.google.cloud.dataproc.v1beta2.ClusterControllerClient.ListClustersPagedResponse; - -import com.google.common.base.Preconditions; -import java.util.logging.Level; -import java.util.logging.Logger; -import org.junit.Test; - -@javax.annotation.Generated("by GAPIC") -public class ClusterControllerSmokeTest { - private static final String PROJECT_ENV_NAME = "GOOGLE_CLOUD_PROJECT"; - private static final String LEGACY_PROJECT_ENV_NAME = "GCLOUD_PROJECT"; - - @Test - public void run() { - main(null); - } - - public static void main(String args[]) { - Logger.getLogger("").setLevel(Level.WARNING); - try { - executeNoCatch(getProjectId()); - System.out.println("OK"); - } catch (Exception e) { - System.err.println("Failed with exception:"); - e.printStackTrace(System.err); - System.exit(1); - } - } - - public static void executeNoCatch(String projectId) throws Exception { - try (ClusterControllerClient client = ClusterControllerClient.create()) { - String projectId2 = projectId; - String region = "global"; - - ListClustersPagedResponse pagedResponse = client.listClusters(projectId2, region); - } - } - - private static String getProjectId() { - String projectId = System.getProperty(PROJECT_ENV_NAME, System.getenv(PROJECT_ENV_NAME)); - if (projectId == null) { - projectId = - System.getProperty(LEGACY_PROJECT_ENV_NAME, System.getenv(LEGACY_PROJECT_ENV_NAME)); - } - Preconditions.checkArgument(projectId != null, "A project ID is required."); - return projectId; - } -} diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java index 0b686f20..736499f5 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1beta2; import static com.google.cloud.dataproc.v1beta2.JobControllerClient.ListJobsPagedResponse; @@ -31,13 +32,15 @@ import com.google.protobuf.Any; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; -import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -45,42 +48,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class JobControllerClientTest { - private static MockAutoscalingPolicyService mockAutoscalingPolicyService; - private static MockClusterController mockClusterController; - private static MockJobController mockJobController; - private static MockWorkflowTemplateService mockWorkflowTemplateService; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private JobControllerClient client; + private static MockJobController mockJobController; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { - mockAutoscalingPolicyService = new MockAutoscalingPolicyService(); - mockClusterController = new MockClusterController(); mockJobController = new MockJobController(); - mockWorkflowTemplateService = new MockWorkflowTemplateService(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), - Arrays.asList( - mockAutoscalingPolicyService, - mockClusterController, - mockJobController, - mockWorkflowTemplateService)); - serviceHelper.start(); + UUID.randomUUID().toString(), Arrays.asList(mockJobController)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); JobControllerSettings settings = JobControllerSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -95,24 +87,25 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void submitJobTest() { - String submittedBy = "submittedBy-2047729125"; - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; + public void submitJobTest() throws Exception { Job expectedResponse = Job.newBuilder() - .setSubmittedBy(submittedBy) - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setSubmittedBy("submittedBy72490674") + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + 
.setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Job job = Job.newBuilder().build(); @@ -121,7 +114,7 @@ public void submitJobTest() { List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SubmitJobRequest actualRequest = (SubmitJobRequest) actualRequests.get(0); + SubmitJobRequest actualRequest = ((SubmitJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -133,38 +126,37 @@ public void submitJobTest() { } @Test - @SuppressWarnings("all") public void submitJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Job job = Job.newBuilder().build(); - client.submitJob(projectId, region, job); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") public void submitJobAsOperationTest() throws Exception { - String submittedBy = "submittedBy-2047729125"; - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; Job expectedResponse = Job.newBuilder() - .setSubmittedBy(submittedBy) - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setSubmittedBy("submittedBy72490674") + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); Operation resultOperation = Operation.newBuilder() @@ -174,7 +166,7 @@ public void submitJobAsOperationTest() throws Exception { .build(); mockJobController.addResponse(resultOperation); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Job job = Job.newBuilder().build(); @@ -183,7 +175,7 @@ public void submitJobAsOperationTest() throws Exception { List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SubmitJobRequest actualRequest = (SubmitJobRequest) actualRequests.get(0); + SubmitJobRequest actualRequest = ((SubmitJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -195,53 +187,52 @@ public void submitJobAsOperationTest() throws Exception { } @Test - @SuppressWarnings("all") public void submitJobAsOperationExceptionTest() throws Exception { - StatusRuntimeException exception = new 
StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; Job job = Job.newBuilder().build(); - client.submitJobAsOperationAsync(projectId, region, job).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void getJobTest() { - String submittedBy = "submittedBy-2047729125"; - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; + public void getJobTest() throws Exception { Job expectedResponse = Job.newBuilder() - .setSubmittedBy(submittedBy) - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setSubmittedBy("submittedBy72490674") + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; + String jobId = "jobId101296568"; Job actualResponse = client.getJob(projectId, region, jobId); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetJobRequest actualRequest = (GetJobRequest) actualRequests.get(0); + GetJobRequest actualRequest = ((GetJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -253,45 +244,44 @@ public void getJobTest() { } @Test - @SuppressWarnings("all") public void getJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; - + String jobId = "jobId101296568"; client.getJob(projectId, region, jobId); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
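      // Reviewer note, not generated code: the two exception styles above are deliberate.
      // Synchronous calls such as client.getJob(...) throw the mapped InvalidArgumentException
      // directly, while the async LRO variant submitJobAsOperationAsync(...).get() wraps the
      // same failure in an ExecutionException, so that test unwraps e.getCause() and checks
      // StatusCode.Code.INVALID_ARGUMENT on the cause instead.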
} } @Test - @SuppressWarnings("all") - public void listJobsTest() { - String nextPageToken = ""; - Job jobsElement = Job.newBuilder().build(); - List jobs = Arrays.asList(jobsElement); + public void listJobsTest() throws Exception { + Job responsesElement = Job.newBuilder().build(); ListJobsResponse expectedResponse = - ListJobsResponse.newBuilder().setNextPageToken(nextPageToken).addAllJobs(jobs).build(); + ListJobsResponse.newBuilder() + .setNextPageToken("") + .addAllJobs(Arrays.asList(responsesElement)) + .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; ListJobsPagedResponse pagedListResponse = client.listJobs(projectId, region); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0)); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - ListJobsRequest actualRequest = (ListJobsRequest) actualRequests.get(0); + ListJobsRequest actualRequest = ((ListJobsRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -302,47 +292,48 @@ public void listJobsTest() { } @Test - @SuppressWarnings("all") public void listJobsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - client.listJobs(projectId, region); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
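      // Reviewer note, illustrative only: listJobsTest above seeds a single page with an
      // empty nextPageToken, so iterateAll() stops after one RPC. Assuming a configured
      // client, draining every page would look like:
      //   for (Job job : client.listJobs(projectId, region).iterateAll()) {
      //     // gax issues follow-up ListJobsRequests while nextPageToken is non-empty
      //   }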
} } @Test - @SuppressWarnings("all") - public void listJobsTest2() { - String nextPageToken = ""; - Job jobsElement = Job.newBuilder().build(); - List jobs = Arrays.asList(jobsElement); + public void listJobsTest2() throws Exception { + Job responsesElement = Job.newBuilder().build(); ListJobsResponse expectedResponse = - ListJobsResponse.newBuilder().setNextPageToken(nextPageToken).addAllJobs(jobs).build(); + ListJobsResponse.newBuilder() + .setNextPageToken("") + .addAllJobs(Arrays.asList(responsesElement)) + .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; + String filter = "filter-1274492040"; - ListJobsPagedResponse pagedListResponse = client.listJobs(projectId, region); + ListJobsPagedResponse pagedListResponse = client.listJobs(projectId, region, filter); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getJobsList().get(0), resources.get(0)); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - ListJobsRequest actualRequest = (ListJobsRequest) actualRequests.get(0); + ListJobsRequest actualRequest = ((ListJobsRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); + Assert.assertEquals(filter, actualRequest.getFilter()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -350,52 +341,47 @@ public void listJobsTest2() { } @Test - @SuppressWarnings("all") public void listJobsExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - - client.listJobs(projectId, region); + String filter = "filter-1274492040"; + client.listJobs(projectId, region, filter); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
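      // Reviewer note: listJobsTest2 covers the new flattened overload
      // listJobs(projectId, region, filter) and asserts the filter string survives the
      // round trip into ListJobsRequest.getFilter().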
} } @Test - @SuppressWarnings("all") - public void updateJobTest() { - String submittedBy = "submittedBy-2047729125"; - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; + public void updateJobTest() throws Exception { Job expectedResponse = Job.newBuilder() - .setSubmittedBy(submittedBy) - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setSubmittedBy("submittedBy72490674") + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; - String region = "region-934795532"; - String jobId = "jobId-1154752291"; - Job job = Job.newBuilder().build(); - FieldMask updateMask = FieldMask.newBuilder().build(); UpdateJobRequest request = UpdateJobRequest.newBuilder() - .setProjectId(projectId) - .setRegion(region) - .setJobId(jobId) - .setJob(job) - .setUpdateMask(updateMask) + .setProjectId("projectId-894832108") + .setRegion("region-934795532") + .setJobId("jobId101296568") + .setJob(Job.newBuilder().build()) + .setUpdateMask(FieldMask.newBuilder().build()) .build(); Job actualResponse = client.updateJob(request); @@ -403,13 +389,13 @@ public void updateJobTest() { List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - UpdateJobRequest actualRequest = (UpdateJobRequest) actualRequests.get(0); + UpdateJobRequest actualRequest = ((UpdateJobRequest) actualRequests.get(0)); - Assert.assertEquals(projectId, actualRequest.getProjectId()); - Assert.assertEquals(region, actualRequest.getRegion()); - Assert.assertEquals(jobId, actualRequest.getJobId()); - Assert.assertEquals(job, actualRequest.getJob()); - Assert.assertEquals(updateMask, actualRequest.getUpdateMask()); + Assert.assertEquals(request.getProjectId(), actualRequest.getProjectId()); + Assert.assertEquals(request.getRegion(), actualRequest.getRegion()); + Assert.assertEquals(request.getJobId(), actualRequest.getJobId()); + Assert.assertEquals(request.getJob(), actualRequest.getJob()); + Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -417,61 +403,55 @@ public void updateJobTest() { } @Test - @SuppressWarnings("all") public void updateJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; - String region = "region-934795532"; - String jobId = "jobId-1154752291"; - Job job = Job.newBuilder().build(); - FieldMask updateMask = FieldMask.newBuilder().build(); UpdateJobRequest request = UpdateJobRequest.newBuilder() - 
.setProjectId(projectId) - .setRegion(region) - .setJobId(jobId) - .setJob(job) - .setUpdateMask(updateMask) + .setProjectId("projectId-894832108") + .setRegion("region-934795532") + .setJobId("jobId101296568") + .setJob(Job.newBuilder().build()) + .setUpdateMask(FieldMask.newBuilder().build()) .build(); - client.updateJob(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void cancelJobTest() { - String submittedBy = "submittedBy-2047729125"; - String driverOutputResourceUri = "driverOutputResourceUri-542229086"; - String driverControlFilesUri = "driverControlFilesUri207057643"; - String jobUuid = "jobUuid-1615012099"; - boolean done = true; + public void cancelJobTest() throws Exception { Job expectedResponse = Job.newBuilder() - .setSubmittedBy(submittedBy) - .setDriverOutputResourceUri(driverOutputResourceUri) - .setDriverControlFilesUri(driverControlFilesUri) - .setJobUuid(jobUuid) - .setDone(done) + .setReference(JobReference.newBuilder().build()) + .setPlacement(JobPlacement.newBuilder().build()) + .setStatus(JobStatus.newBuilder().build()) + .addAllStatusHistory(new ArrayList()) + .addAllYarnApplications(new ArrayList()) + .setSubmittedBy("submittedBy72490674") + .setDriverOutputResourceUri("driverOutputResourceUri794556277") + .setDriverControlFilesUri("driverControlFilesUri1491948202") + .putAllLabels(new HashMap()) + .setScheduling(JobScheduling.newBuilder().build()) + .setJobUuid("jobUuid-1437868776") + .setDone(true) .build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; + String jobId = "jobId101296568"; Job actualResponse = client.cancelJob(projectId, region, jobId); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CancelJobRequest actualRequest = (CancelJobRequest) actualRequests.get(0); + CancelJobRequest actualRequest = ((CancelJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -483,38 +463,35 @@ public void cancelJobTest() { } @Test - @SuppressWarnings("all") public void cancelJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; - + String jobId = "jobId101296568"; client.cancelJob(projectId, region, jobId); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
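      // Reviewer note, not generated code: updateJob keeps no flattened overload, so its
      // tests construct UpdateJobRequest directly and compare the recorded request
      // field-by-field (getProjectId/getRegion/getJobId/getJob/getUpdateMask) against
      // the request that was sent, rather than against local variables.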
} } @Test - @SuppressWarnings("all") - public void deleteJobTest() { + public void deleteJobTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockJobController.addResponse(expectedResponse); - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; + String jobId = "jobId101296568"; client.deleteJob(projectId, region, jobId); List actualRequests = mockJobController.getRequests(); Assert.assertEquals(1, actualRequests.size()); - DeleteJobRequest actualRequest = (DeleteJobRequest) actualRequests.get(0); + DeleteJobRequest actualRequest = ((DeleteJobRequest) actualRequests.get(0)); Assert.assertEquals(projectId, actualRequest.getProjectId()); Assert.assertEquals(region, actualRequest.getRegion()); @@ -526,20 +503,18 @@ public void deleteJobTest() { } @Test - @SuppressWarnings("all") public void deleteJobExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockJobController.addException(exception); try { - String projectId = "projectId-1969970175"; + String projectId = "projectId-894832108"; String region = "region-934795532"; - String jobId = "jobId-1154752291"; - + String jobId = "jobId101296568"; client.deleteJob(projectId, region, jobId); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyService.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyService.java index dee2d73d..067e045d 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyService.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyService.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockAutoscalingPolicyService implements MockGrpcService { private final MockAutoscalingPolicyServiceImpl serviceImpl; diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyServiceImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyServiceImpl.java index bf12b9a1..2f31d7da 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyServiceImpl.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyServiceImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.BetaApi; @@ -24,9 +25,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockAutoscalingPolicyServiceImpl extends AutoscalingPolicyServiceImplBase { private List requests; private Queue responses; @@ -63,10 +65,10 @@ public void createAutoscalingPolicy( Object response = responses.remove(); if (response instanceof AutoscalingPolicy) { requests.add(request); - responseObserver.onNext((AutoscalingPolicy) response); + responseObserver.onNext(((AutoscalingPolicy) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -78,10 +80,10 @@ public void updateAutoscalingPolicy( Object response = responses.remove(); if (response instanceof AutoscalingPolicy) { requests.add(request); - responseObserver.onNext((AutoscalingPolicy) response); + responseObserver.onNext(((AutoscalingPolicy) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -93,10 +95,10 @@ public void getAutoscalingPolicy( Object response = responses.remove(); if (response instanceof AutoscalingPolicy) { requests.add(request); - responseObserver.onNext((AutoscalingPolicy) response); + responseObserver.onNext(((AutoscalingPolicy) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -109,10 
+111,10 @@ public void listAutoscalingPolicies( Object response = responses.remove(); if (response instanceof ListAutoscalingPoliciesResponse) { requests.add(request); - responseObserver.onNext((ListAutoscalingPoliciesResponse) response); + responseObserver.onNext(((ListAutoscalingPoliciesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -124,10 +126,10 @@ public void deleteAutoscalingPolicy( Object response = responses.remove(); if (response instanceof Empty) { requests.add(request); - responseObserver.onNext((Empty) response); + responseObserver.onNext(((Empty) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockClusterController.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockClusterController.java index 43bc081f..296e5a3d 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockClusterController.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockClusterController.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockClusterController implements MockGrpcService { private final MockClusterControllerImpl serviceImpl; diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockClusterControllerImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockClusterControllerImpl.java index c544d367..1f62239c 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockClusterControllerImpl.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockClusterControllerImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.BetaApi; @@ -24,9 +25,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockClusterControllerImpl extends ClusterControllerImplBase { private List requests; private Queue responses; @@ -63,10 +65,10 @@ public void createCluster( Object response = responses.remove(); if (response instanceof Operation) { requests.add(request); - responseObserver.onNext((Operation) response); + responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -78,10 +80,10 @@ public void updateCluster( Object response = responses.remove(); if (response instanceof Operation) { requests.add(request); - responseObserver.onNext((Operation) response); + responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -93,10 +95,10 @@ public void deleteCluster( Object response = responses.remove(); if (response instanceof Operation) { requests.add(request); - responseObserver.onNext((Operation) response); + responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -107,10 +109,10 @@ public void getCluster(GetClusterRequest request, StreamObserver respon Object response = responses.remove(); if (response instanceof Cluster) { requests.add(request); - responseObserver.onNext((Cluster) response); + responseObserver.onNext(((Cluster) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -122,10 +124,10 @@ public void listClusters( Object response = responses.remove(); if (response instanceof ListClustersResponse) { requests.add(request); - responseObserver.onNext((ListClustersResponse) response); + responseObserver.onNext(((ListClustersResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -137,10 +139,10 @@ public void diagnoseCluster( Object response = responses.remove(); if (response instanceof Operation) { requests.add(request); - responseObserver.onNext((Operation) response); + responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { 
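      // Reviewer note: every regenerated Mock*Impl handler follows this queue-driven shape:
      // pop the next canned Object, replay it through onNext()/onCompleted() when the type
      // matches, forward it through onError() when it is an Exception, and otherwise fall
      // through to the IllegalArgumentException below so a mis-sequenced test fails fast.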
responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockJobController.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockJobController.java index 3ed43f60..b970524d 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockJobController.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockJobController.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockJobController implements MockGrpcService { private final MockJobControllerImpl serviceImpl; diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockJobControllerImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockJobControllerImpl.java index c8c5c0a8..f533ba1c 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockJobControllerImpl.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockJobControllerImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.BetaApi; @@ -25,9 +26,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockJobControllerImpl extends JobControllerImplBase { private List requests; private Queue responses; @@ -63,10 +65,10 @@ public void submitJob(SubmitJobRequest request, StreamObserver responseObse Object response = responses.remove(); if (response instanceof Job) { requests.add(request); - responseObserver.onNext((Job) response); + responseObserver.onNext(((Job) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -78,10 +80,10 @@ public void submitJobAsOperation( Object response = responses.remove(); if (response instanceof Operation) { requests.add(request); - responseObserver.onNext((Operation) response); + responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -92,10 +94,10 @@ public void getJob(GetJobRequest request, StreamObserver responseObserver) Object response = responses.remove(); if (response instanceof Job) { requests.add(request); - responseObserver.onNext((Job) response); + responseObserver.onNext(((Job) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -106,10 +108,10 @@ public void listJobs(ListJobsRequest request, StreamObserver r Object response = responses.remove(); if (response instanceof ListJobsResponse) { requests.add(request); - responseObserver.onNext((ListJobsResponse) response); + responseObserver.onNext(((ListJobsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -120,10 +122,10 @@ public void updateJob(UpdateJobRequest request, StreamObserver responseObse Object response = responses.remove(); if (response instanceof Job) { requests.add(request); - responseObserver.onNext((Job) response); + responseObserver.onNext(((Job) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -134,10 +136,10 @@ public void cancelJob(CancelJobRequest request, StreamObserver responseObse Object response = responses.remove(); if (response instanceof Job) { requests.add(request); - responseObserver.onNext((Job) response); + responseObserver.onNext(((Job) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - 
responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -148,10 +150,10 @@ public void deleteJob(DeleteJobRequest request, StreamObserver responseOb Object response = responses.remove(); if (response instanceof Empty) { requests.add(request); - responseObserver.onNext((Empty) response); + responseObserver.onNext(((Empty) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateService.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateService.java index 9e3930bd..295398c7 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateService.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateService.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockWorkflowTemplateService implements MockGrpcService { private final MockWorkflowTemplateServiceImpl serviceImpl; diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateServiceImpl.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateServiceImpl.java index e6968e89..c373ce15 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateServiceImpl.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockWorkflowTemplateServiceImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.dataproc.v1beta2; import com.google.api.core.BetaApi; @@ -25,9 +26,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockWorkflowTemplateServiceImpl extends WorkflowTemplateServiceImplBase { private List requests; private Queue responses; @@ -64,10 +66,10 @@ public void createWorkflowTemplate( Object response = responses.remove(); if (response instanceof WorkflowTemplate) { requests.add(request); - responseObserver.onNext((WorkflowTemplate) response); + responseObserver.onNext(((WorkflowTemplate) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -79,10 +81,10 @@ public void getWorkflowTemplate( Object response = responses.remove(); if (response instanceof WorkflowTemplate) { requests.add(request); - responseObserver.onNext((WorkflowTemplate) response); + responseObserver.onNext(((WorkflowTemplate) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -94,10 +96,10 @@ public void instantiateWorkflowTemplate( Object response = responses.remove(); if (response instanceof Operation) { requests.add(request); - responseObserver.onNext((Operation) response); + responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -110,10 +112,10 @@ public void instantiateInlineWorkflowTemplate( Object response = responses.remove(); if (response instanceof Operation) { requests.add(request); - responseObserver.onNext((Operation) response); + responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -125,10 +127,10 @@ public void updateWorkflowTemplate( Object response = responses.remove(); if (response instanceof WorkflowTemplate) { requests.add(request); - responseObserver.onNext((WorkflowTemplate) response); + responseObserver.onNext(((WorkflowTemplate) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -141,10 +143,10 @@ public void listWorkflowTemplates( Object response = responses.remove(); if (response instanceof ListWorkflowTemplatesResponse) { requests.add(request); - responseObserver.onNext((ListWorkflowTemplatesResponse) response); + responseObserver.onNext(((ListWorkflowTemplatesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { 
- responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -156,10 +158,10 @@ public void deleteWorkflowTemplate( Object response = responses.remove(); if (response instanceof Empty) { requests.add(request); - responseObserver.onNext((Empty) response); + responseObserver.onNext(((Empty) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClientTest.java b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClientTest.java index 66f0b181..1594a95d 100644 --- a/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClientTest.java +++ b/google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.dataproc.v1beta2; import static com.google.cloud.dataproc.v1beta2.WorkflowTemplateServiceClient.ListWorkflowTemplatesPagedResponse; @@ -29,16 +30,19 @@ import com.google.longrunning.Operation; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Any; +import com.google.protobuf.Duration; import com.google.protobuf.Empty; -import io.grpc.Status; +import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -46,42 +50,32 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class WorkflowTemplateServiceClientTest { - private static MockAutoscalingPolicyService mockAutoscalingPolicyService; - private static MockClusterController mockClusterController; - private static MockJobController mockJobController; private static MockWorkflowTemplateService mockWorkflowTemplateService; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private WorkflowTemplateServiceClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { - mockAutoscalingPolicyService = new MockAutoscalingPolicyService(); - mockClusterController = new MockClusterController(); - mockJobController = new MockJobController(); mockWorkflowTemplateService = new MockWorkflowTemplateService(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), - 
Arrays.asList( - mockAutoscalingPolicyService, - mockClusterController, - mockJobController, - mockWorkflowTemplateService)); - serviceHelper.start(); + Arrays.asList(mockWorkflowTemplateService)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); WorkflowTemplateServiceSettings settings = WorkflowTemplateServiceSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -96,7 +90,272 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") + public void createWorkflowTemplateTest() throws Exception { + WorkflowTemplate expectedResponse = + WorkflowTemplate.newBuilder() + .setId("id3355") + .setName( + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]") + .toString()) + .setVersion(351608024) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .putAllLabels(new HashMap()) + .setPlacement(WorkflowTemplatePlacement.newBuilder().build()) + .addAllJobs(new ArrayList()) + .addAllParameters(new ArrayList()) + .setDagTimeout(Duration.newBuilder().build()) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateWorkflowTemplateRequest actualRequest = + ((CreateWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(template, actualRequest.getTemplate()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + client.createWorkflowTemplate(parent, template); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
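      // Reviewer note, illustrative only: createWorkflowTemplate now has three flattened
      // overloads keyed on the parent type, each exercised by one of the tests in this
      // file. Assuming a live client and an existing template, the calls would be:
      //   client.createWorkflowTemplate(LocationName.of("[PROJECT]", "[LOCATION]"), template);
      //   client.createWorkflowTemplate(RegionName.of("[PROJECT]", "[REGION]"), template);
      //   client.createWorkflowTemplate("parent-995424086", template);
      // Typed parents are serialized with parent.toString(), which is what the
      // CreateWorkflowTemplateRequest.getParent() assertions verify.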
+ } + } + + @Test + public void createWorkflowTemplateTest2() throws Exception { + WorkflowTemplate expectedResponse = + WorkflowTemplate.newBuilder() + .setId("id3355") + .setName( + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]") + .toString()) + .setVersion(351608024) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .putAllLabels(new HashMap()) + .setPlacement(WorkflowTemplatePlacement.newBuilder().build()) + .addAllJobs(new ArrayList()) + .addAllParameters(new ArrayList()) + .setDagTimeout(Duration.newBuilder().build()) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateWorkflowTemplateRequest actualRequest = + ((CreateWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(template, actualRequest.getTemplate()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + client.createWorkflowTemplate(parent, template); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createWorkflowTemplateTest3() throws Exception { + WorkflowTemplate expectedResponse = + WorkflowTemplate.newBuilder() + .setId("id3355") + .setName( + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]") + .toString()) + .setVersion(351608024) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .putAllLabels(new HashMap()) + .setPlacement(WorkflowTemplatePlacement.newBuilder().build()) + .addAllJobs(new ArrayList()) + .addAllParameters(new ArrayList()) + .setDagTimeout(Duration.newBuilder().build()) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + + WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateWorkflowTemplateRequest actualRequest = + ((CreateWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(template, actualRequest.getTemplate()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createWorkflowTemplateExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String parent = "parent-995424086"; + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + client.createWorkflowTemplate(parent, template); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getWorkflowTemplateTest() throws Exception { + WorkflowTemplate expectedResponse = + WorkflowTemplate.newBuilder() + .setId("id3355") + .setName( + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]") + .toString()) + .setVersion(351608024) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .putAllLabels(new HashMap()) + .setPlacement(WorkflowTemplatePlacement.newBuilder().build()) + .addAllJobs(new ArrayList()) + .addAllParameters(new ArrayList()) + .setDagTimeout(Duration.newBuilder().build()) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + WorkflowTemplateName name = + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + + WorkflowTemplate actualResponse = client.getWorkflowTemplate(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetWorkflowTemplateRequest actualRequest = ((GetWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getWorkflowTemplateExceptionTest() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + WorkflowTemplateName name = + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + client.getWorkflowTemplate(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void getWorkflowTemplateTest2() throws Exception { + WorkflowTemplate expectedResponse = + WorkflowTemplate.newBuilder() + .setId("id3355") + .setName( + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]") + .toString()) + .setVersion(351608024) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .putAllLabels(new HashMap()) + .setPlacement(WorkflowTemplatePlacement.newBuilder().build()) + .addAllJobs(new ArrayList()) + .addAllParameters(new ArrayList()) + .setDagTimeout(Duration.newBuilder().build()) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String name = "name3373707"; + + WorkflowTemplate actualResponse = client.getWorkflowTemplate(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetWorkflowTemplateRequest actualRequest = ((GetWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String name = "name3373707"; + client.getWorkflowTemplate(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test public void instantiateWorkflowTemplateTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = @@ -111,15 +370,14 @@ public void instantiateWorkflowTemplateTest() throws Exception { WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - Empty actualResponse = client.instantiateWorkflowTemplateAsync(name).get(); - Assert.assertEquals(expectedResponse, actualResponse); + client.instantiateWorkflowTemplateAsync(name).get(); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); InstantiateWorkflowTemplateRequest actualRequest = - (InstantiateWorkflowTemplateRequest) actualRequests.get(0); + ((InstantiateWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -127,32 +385,72 @@ public void instantiateWorkflowTemplateTest() throws Exception { } @Test - @SuppressWarnings("all") public void instantiateWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - client.instantiateWorkflowTemplateAsync(name).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { 
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") public void instantiateWorkflowTemplateTest2() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = Operation.newBuilder() - .setName("instantiateWorkflowTemplateTest2") + .setName("instantiateWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockWorkflowTemplateService.addResponse(resultOperation); + + String name = "name3373707"; + + client.instantiateWorkflowTemplateAsync(name).get(); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + InstantiateWorkflowTemplateRequest actualRequest = + ((InstantiateWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void instantiateWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String name = "name3373707"; + client.instantiateWorkflowTemplateAsync(name).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + + @Test + public void instantiateWorkflowTemplateTest3() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateWorkflowTemplateTest") .setDone(true) .setResponse(Any.pack(expectedResponse)) .build(); @@ -163,15 +461,14 @@ public void instantiateWorkflowTemplateTest2() throws Exception { "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); Map parameters = new HashMap<>(); - Empty actualResponse = client.instantiateWorkflowTemplateAsync(name, parameters).get(); - Assert.assertEquals(expectedResponse, actualResponse); + client.instantiateWorkflowTemplateAsync(name, parameters).get(); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); InstantiateWorkflowTemplateRequest actualRequest = - (InstantiateWorkflowTemplateRequest) actualRequests.get(0); + ((InstantiateWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertEquals(parameters, actualRequest.getParametersMap()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -180,9 +477,8 @@ public void instantiateWorkflowTemplateTest2() throws Exception { } @Test - @SuppressWarnings("all") - public void instantiateWorkflowTemplateExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void 
instantiateWorkflowTemplateExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { @@ -190,18 +486,62 @@ public void instantiateWorkflowTemplateExceptionTest2() throws Exception { WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); Map parameters = new HashMap<>(); + client.instantiateWorkflowTemplateAsync(name, parameters).get(); + Assert.fail("No exception raised"); + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); + } + } + + @Test + public void instantiateWorkflowTemplateTest4() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) + .build(); + mockWorkflowTemplateService.addResponse(resultOperation); + + String name = "name3373707"; + Map parameters = new HashMap<>(); + + client.instantiateWorkflowTemplateAsync(name, parameters).get(); + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + InstantiateWorkflowTemplateRequest actualRequest = + ((InstantiateWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertEquals(parameters, actualRequest.getParametersMap()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void instantiateWorkflowTemplateExceptionTest4() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String name = "name3373707"; + Map parameters = new HashMap<>(); client.instantiateWorkflowTemplateAsync(name, parameters).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") public void instantiateInlineWorkflowTemplateTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); Operation resultOperation = @@ -212,18 +552,17 @@ public void instantiateInlineWorkflowTemplateTest() throws Exception { .build(); mockWorkflowTemplateService.addResponse(resultOperation); - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - Empty actualResponse = client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); - Assert.assertEquals(expectedResponse, actualResponse); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); List actualRequests = mockWorkflowTemplateService.getRequests(); 
Assert.assertEquals(1, actualRequests.size()); InstantiateInlineWorkflowTemplateRequest actualRequest = - (InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0); + ((InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -232,52 +571,44 @@ public void instantiateInlineWorkflowTemplateTest() throws Exception { } @Test - @SuppressWarnings("all") public void instantiateInlineWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); Assert.fail("No exception raised"); } catch (ExecutionException e) { Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void createWorkflowTemplateTest() { - String id = "id3355"; - WorkflowTemplateName name = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - int version = 351608024; - WorkflowTemplate expectedResponse = - WorkflowTemplate.newBuilder() - .setId(id) - .setName(name.toString()) - .setVersion(version) + public void instantiateInlineWorkflowTemplateTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateInlineWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) .build(); - mockWorkflowTemplateService.addResponse(expectedResponse); + mockWorkflowTemplateService.addResponse(resultOperation); RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - WorkflowTemplate actualResponse = client.createWorkflowTemplate(parent, template); - Assert.assertEquals(expectedResponse, actualResponse); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateWorkflowTemplateRequest actualRequest = - (CreateWorkflowTemplateRequest) actualRequests.get(0); + InstantiateInlineWorkflowTemplateRequest actualRequest = + ((InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -286,50 +617,45 @@ public void createWorkflowTemplateTest() { } @Test - @SuppressWarnings("all") - public void 
createWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void instantiateInlineWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - - client.createWorkflowTemplate(parent, template); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void getWorkflowTemplateTest() { - String id = "id3355"; - WorkflowTemplateName name2 = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - int version = 351608024; - WorkflowTemplate expectedResponse = - WorkflowTemplate.newBuilder() - .setId(id) - .setName(name2.toString()) - .setVersion(version) + public void instantiateInlineWorkflowTemplateTest3() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + Operation resultOperation = + Operation.newBuilder() + .setName("instantiateInlineWorkflowTemplateTest") + .setDone(true) + .setResponse(Any.pack(expectedResponse)) .build(); - mockWorkflowTemplateService.addResponse(expectedResponse); + mockWorkflowTemplateService.addResponse(resultOperation); - WorkflowTemplateName name = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + String parent = "parent-995424086"; + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - WorkflowTemplate actualResponse = client.getWorkflowTemplate(name); - Assert.assertEquals(expectedResponse, actualResponse); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetWorkflowTemplateRequest actualRequest = (GetWorkflowTemplateRequest) actualRequests.get(0); + InstantiateInlineWorkflowTemplateRequest actualRequest = + ((InstantiateInlineWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -337,36 +663,39 @@ public void getWorkflowTemplateTest() { } @Test - @SuppressWarnings("all") - public void getWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + public void instantiateInlineWorkflowTemplateExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { - WorkflowTemplateName name = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - 
"[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - - client.getWorkflowTemplate(name); + String parent = "parent-995424086"; + WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); + client.instantiateInlineWorkflowTemplateAsync(parent, template).get(); Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception + } catch (ExecutionException e) { + Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass()); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void updateWorkflowTemplateTest() { - String id = "id3355"; - WorkflowTemplateName name = - WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( - "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); - int version = 351608024; + public void updateWorkflowTemplateTest() throws Exception { WorkflowTemplate expectedResponse = WorkflowTemplate.newBuilder() - .setId(id) - .setName(name.toString()) - .setVersion(version) + .setId("id3355") + .setName( + WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( + "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]") + .toString()) + .setVersion(351608024) + .setCreateTime(Timestamp.newBuilder().build()) + .setUpdateTime(Timestamp.newBuilder().build()) + .putAllLabels(new HashMap()) + .setPlacement(WorkflowTemplatePlacement.newBuilder().build()) + .addAllJobs(new ArrayList()) + .addAllParameters(new ArrayList()) + .setDagTimeout(Duration.newBuilder().build()) .build(); mockWorkflowTemplateService.addResponse(expectedResponse); @@ -378,7 +707,7 @@ public void updateWorkflowTemplateTest() { List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); UpdateWorkflowTemplateRequest actualRequest = - (UpdateWorkflowTemplateRequest) actualRequests.get(0); + ((UpdateWorkflowTemplateRequest) actualRequests.get(0)); Assert.assertEquals(template, actualRequest.getTemplate()); Assert.assertTrue( @@ -388,48 +717,44 @@ public void updateWorkflowTemplateTest() { } @Test - @SuppressWarnings("all") public void updateWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { WorkflowTemplate template = WorkflowTemplate.newBuilder().build(); - client.updateWorkflowTemplate(template); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void listWorkflowTemplatesTest() { - String nextPageToken = ""; - WorkflowTemplate templatesElement = WorkflowTemplate.newBuilder().build(); - List templates = Arrays.asList(templatesElement); + public void listWorkflowTemplatesTest() throws Exception { + WorkflowTemplate responsesElement = WorkflowTemplate.newBuilder().build(); ListWorkflowTemplatesResponse expectedResponse = ListWorkflowTemplatesResponse.newBuilder() - .setNextPageToken(nextPageToken) - .addAllTemplates(templates) + .setNextPageToken("") + .addAllTemplates(Arrays.asList(responsesElement)) .build(); mockWorkflowTemplateService.addResponse(expectedResponse); - RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); ListWorkflowTemplatesPagedResponse pagedListResponse = client.listWorkflowTemplates(parent); List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + Assert.assertEquals(1, resources.size()); Assert.assertEquals(expectedResponse.getTemplatesList().get(0), resources.get(0)); List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); ListWorkflowTemplatesRequest actualRequest = - (ListWorkflowTemplatesRequest) actualRequests.get(0); + ((ListWorkflowTemplatesRequest) actualRequests.get(0)); - Assert.assertEquals(parent, RegionName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -437,24 +762,111 @@ public void listWorkflowTemplatesTest() { } @Test - @SuppressWarnings("all") public void listWorkflowTemplatesExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]"); + client.listWorkflowTemplates(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void listWorkflowTemplatesTest2() throws Exception { + WorkflowTemplate responsesElement = WorkflowTemplate.newBuilder().build(); + ListWorkflowTemplatesResponse expectedResponse = + ListWorkflowTemplatesResponse.newBuilder() + .setNextPageToken("") + .addAllTemplates(Arrays.asList(responsesElement)) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + + ListWorkflowTemplatesPagedResponse pagedListResponse = client.listWorkflowTemplates(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTemplatesList().get(0), resources.get(0)); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListWorkflowTemplatesRequest actualRequest = + ((ListWorkflowTemplatesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listWorkflowTemplatesExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { RegionName parent = RegionName.of("[PROJECT]", "[REGION]"); + client.listWorkflowTemplates(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + @Test + public void listWorkflowTemplatesTest3() throws Exception { + WorkflowTemplate responsesElement = WorkflowTemplate.newBuilder().build(); + ListWorkflowTemplatesResponse expectedResponse = + ListWorkflowTemplatesResponse.newBuilder() + .setNextPageToken("") + .addAllTemplates(Arrays.asList(responsesElement)) + .build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + ListWorkflowTemplatesPagedResponse pagedListResponse = client.listWorkflowTemplates(parent); + + List resources = Lists.newArrayList(pagedListResponse.iterateAll()); + + Assert.assertEquals(1, resources.size()); + Assert.assertEquals(expectedResponse.getTemplatesList().get(0), resources.get(0)); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + ListWorkflowTemplatesRequest actualRequest = + ((ListWorkflowTemplatesRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void listWorkflowTemplatesExceptionTest3() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String parent = "parent-995424086"; client.listWorkflowTemplates(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void deleteWorkflowTemplateTest() { + public void deleteWorkflowTemplateTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockWorkflowTemplateService.addResponse(expectedResponse); @@ -467,9 +879,9 @@ public void deleteWorkflowTemplateTest() { List actualRequests = mockWorkflowTemplateService.getRequests(); Assert.assertEquals(1, actualRequests.size()); DeleteWorkflowTemplateRequest actualRequest = - (DeleteWorkflowTemplateRequest) actualRequests.get(0); + ((DeleteWorkflowTemplateRequest) actualRequests.get(0)); - Assert.assertEquals(name, WorkflowTemplateName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -477,20 +889,53 @@ public void deleteWorkflowTemplateTest() { } @Test - @SuppressWarnings("all") public void deleteWorkflowTemplateExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockWorkflowTemplateService.addException(exception); try { WorkflowTemplateName name = WorkflowTemplateName.ofProjectRegionWorkflowTemplateName( "[PROJECT]", "[REGION]", "[WORKFLOW_TEMPLATE]"); + client.deleteWorkflowTemplate(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + @Test + public void deleteWorkflowTemplateTest2() throws Exception { + Empty expectedResponse = Empty.newBuilder().build(); + mockWorkflowTemplateService.addResponse(expectedResponse); + + String name = "name3373707"; + + client.deleteWorkflowTemplate(name); + + List actualRequests = mockWorkflowTemplateService.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + DeleteWorkflowTemplateRequest actualRequest = + ((DeleteWorkflowTemplateRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void deleteWorkflowTemplateExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockWorkflowTemplateService.addException(exception); + + try { + String name = "name3373707"; client.deleteWorkflowTemplate(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/grpc-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceGrpc.java b/grpc-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceGrpc.java index 0aeb05ba..0909a3e9 100644 --- a/grpc-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceGrpc.java +++ b/grpc-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateServiceGrpc.java @@ -523,7 +523,8 @@ public void instantiateWorkflowTemplate( *
      * Instantiates a template and begins execution.
      * This method is equivalent to executing the sequence
-     * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
+     * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate],
+     * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
      * [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate].
      * The returned Operation can be used to track execution of
      * workflow by polling
@@ -738,7 +739,8 @@ public void instantiateWorkflowTemplate(
      * <pre>
      * Instantiates a template and begins execution.
      * This method is equivalent to executing the sequence
-     * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
+     * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate],
+     * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
      * [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate].
      * The returned Operation can be used to track execution of
      * workflow by polling
@@ -901,7 +903,8 @@ public com.google.longrunning.Operation instantiateWorkflowTemplate(
      * <pre>
      * Instantiates a template and begins execution.
      * This method is equivalent to executing the sequence
-     * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
+     * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate],
+     * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
      * [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate].
      * The returned Operation can be used to track execution of
      * workflow by polling
@@ -1054,7 +1057,8 @@ protected WorkflowTemplateServiceFutureStub build(
      * <pre>
      * Instantiates a template and begins execution.
      * This method is equivalent to executing the sequence
-     * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
+     * [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate],
+     * [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
      * [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate].
      * The returned Operation can be used to track execution of
      * workflow by polling
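A minimal usage sketch of the sequence described in the doc comment above, assuming application-default credentials; the project, region, and template values are placeholders, not values from this patch:

    import com.google.cloud.dataproc.v1beta2.WorkflowTemplateName;
    import com.google.cloud.dataproc.v1beta2.WorkflowTemplateServiceClient;

    public class InstantiateTemplateSketch {
      public static void main(String[] args) throws Exception {
        try (WorkflowTemplateServiceClient client = WorkflowTemplateServiceClient.create()) {
          // Placeholder resource name; substitute real project/region/template IDs.
          WorkflowTemplateName name =
              WorkflowTemplateName.ofProjectRegionWorkflowTemplateName(
                  "my-project", "us-central1", "my-template");
          // get() blocks while the returned OperationFuture polls the workflow operation.
          client.instantiateWorkflowTemplateAsync(name).get();
        }
      }
    }
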
diff --git a/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyName.java b/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyName.java
index f93a19bd..7266069a 100644
--- a/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyName.java
+++ b/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/AutoscalingPolicyName.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *     https://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -26,29 +26,48 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import javax.annotation.Generated;
 
-/** AUTO-GENERATED DOCUMENTATION AND CLASS */
-@javax.annotation.Generated("by GAPIC protoc plugin")
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
+@Generated("by gapic-generator-java")
 public class AutoscalingPolicyName implements ResourceName {
-
-  @Deprecated
-  protected AutoscalingPolicyName() {}
-
-  private static final PathTemplate PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_LOCATION_AUTOSCALING_POLICY =
       PathTemplate.createWithoutUrlEncoding(
           "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}");
-  private static final PathTemplate PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_REGION_AUTOSCALING_POLICY =
       PathTemplate.createWithoutUrlEncoding(
           "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}");
-
   private volatile Map<String, String> fieldValuesMap;
   private PathTemplate pathTemplate;
   private String fixedValue;
+  private final String project;
+  private final String location;
+  private final String autoscalingPolicy;
+  private final String region;
+
+  @Deprecated
+  protected AutoscalingPolicyName() {
+    project = null;
+    location = null;
+    autoscalingPolicy = null;
+    region = null;
+  }
+
+  private AutoscalingPolicyName(Builder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    location = Preconditions.checkNotNull(builder.getLocation());
+    autoscalingPolicy = Preconditions.checkNotNull(builder.getAutoscalingPolicy());
+    region = null;
+    pathTemplate = PROJECT_LOCATION_AUTOSCALING_POLICY;
+  }
 
-  private String project;
-  private String location;
-  private String autoscalingPolicy;
-  private String region;
+  private AutoscalingPolicyName(ProjectRegionAutoscalingPolicyBuilder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    region = Preconditions.checkNotNull(builder.getRegion());
+    autoscalingPolicy = Preconditions.checkNotNull(builder.getAutoscalingPolicy());
+    location = null;
+    pathTemplate = PROJECT_REGION_AUTOSCALING_POLICY;
+  }
 
   public String getProject() {
     return project;
@@ -66,20 +85,6 @@ public String getRegion() {
     return region;
   }
 
-  private AutoscalingPolicyName(Builder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    location = Preconditions.checkNotNull(builder.getLocation());
-    autoscalingPolicy = Preconditions.checkNotNull(builder.getAutoscalingPolicy());
-    pathTemplate = PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE;
-  }
-
-  private AutoscalingPolicyName(ProjectRegionAutoscalingPolicyBuilder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    region = Preconditions.checkNotNull(builder.getRegion());
-    autoscalingPolicy = Preconditions.checkNotNull(builder.getAutoscalingPolicy());
-    pathTemplate = PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE;
-  }
-
   public static Builder newBuilder() {
     return new Builder();
   }
@@ -100,7 +105,7 @@ public Builder toBuilder() {
 
   public static AutoscalingPolicyName of(
       String project, String location, String autoscalingPolicy) {
-    return newProjectLocationAutoscalingPolicyBuilder()
+    return newBuilder()
         .setProject(project)
         .setLocation(location)
         .setAutoscalingPolicy(autoscalingPolicy)
@@ -110,7 +115,7 @@ public static AutoscalingPolicyName of(
   @BetaApi("The static create methods are not stable yet and may be changed in the future.")
   public static AutoscalingPolicyName ofProjectLocationAutoscalingPolicyName(
       String project, String location, String autoscalingPolicy) {
-    return newProjectLocationAutoscalingPolicyBuilder()
+    return newBuilder()
         .setProject(project)
         .setLocation(location)
         .setAutoscalingPolicy(autoscalingPolicy)
@@ -162,18 +167,17 @@ public static AutoscalingPolicyName parse(String formattedString) {
     if (formattedString.isEmpty()) {
       return null;
     }
-    if (PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE.matches(formattedString)) {
-      Map<String, String> matchMap =
-          PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE.match(formattedString);
+    if (PROJECT_LOCATION_AUTOSCALING_POLICY.matches(formattedString)) {
+      Map<String, String> matchMap = PROJECT_LOCATION_AUTOSCALING_POLICY.match(formattedString);
       return ofProjectLocationAutoscalingPolicyName(
           matchMap.get("project"), matchMap.get("location"), matchMap.get("autoscaling_policy"));
-    } else if (PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE.matches(formattedString)) {
-      Map<String, String> matchMap =
-          PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE.match(formattedString);
+    } else if (PROJECT_REGION_AUTOSCALING_POLICY.matches(formattedString)) {
+      Map<String, String> matchMap = PROJECT_REGION_AUTOSCALING_POLICY.match(formattedString);
       return ofProjectRegionAutoscalingPolicyName(
           matchMap.get("project"), matchMap.get("region"), matchMap.get("autoscaling_policy"));
     }
-    throw new ValidationException("JobName.parse: formattedString not in valid format");
+    throw new ValidationException(
+        "AutoscalingPolicyName.parse: formattedString not in valid format");
   }
 
   public static List<AutoscalingPolicyName> parseList(List<String> formattedStrings) {
@@ -197,8 +201,8 @@ public static List<String> toStringList(List<AutoscalingPolicyName> values) {
   }
 
   public static boolean isParsableFrom(String formattedString) {
-    return PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE.matches(formattedString)
-        || PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE.matches(formattedString);
+    return PROJECT_LOCATION_AUTOSCALING_POLICY.matches(formattedString)
+        || PROJECT_REGION_AUTOSCALING_POLICY.matches(formattedString);
   }
 
   @Override
@@ -235,11 +239,41 @@ public String toString() {
     return fixedValue != null ? fixedValue : pathTemplate.instantiate(getFieldValuesMap());
   }
 
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (o != null && getClass() == o.getClass()) {
+      AutoscalingPolicyName that = ((AutoscalingPolicyName) o);
+      return Objects.equals(this.project, that.project)
+          && Objects.equals(this.location, that.location)
+          && Objects.equals(this.autoscalingPolicy, that.autoscalingPolicy)
+          && Objects.equals(this.region, that.region);
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    int h = 1;
+    h *= 1000003;
+    h ^= Objects.hashCode(fixedValue);
+    h *= 1000003;
+    h ^= Objects.hashCode(project);
+    h *= 1000003;
+    h ^= Objects.hashCode(location);
+    h *= 1000003;
+    h ^= Objects.hashCode(autoscalingPolicy);
+    h *= 1000003;
+    h ^= Objects.hashCode(region);
+    return h;
+  }
+
   /**
    * Builder for projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}.
    */
   public static class Builder {
-
     private String project;
     private String location;
     private String autoscalingPolicy;
@@ -275,9 +309,8 @@ public Builder setAutoscalingPolicy(String autoscalingPolicy) {
 
     private Builder(AutoscalingPolicyName autoscalingPolicyName) {
       Preconditions.checkArgument(
-          autoscalingPolicyName.pathTemplate == PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE,
-          "toBuilder is only supported when AutoscalingPolicyName has the pattern of "
-              + "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}.");
+          Objects.equals(autoscalingPolicyName.pathTemplate, PROJECT_LOCATION_AUTOSCALING_POLICY),
+          "toBuilder is only supported when AutoscalingPolicyName has the pattern of projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}");
       project = autoscalingPolicyName.project;
       location = autoscalingPolicyName.location;
       autoscalingPolicy = autoscalingPolicyName.autoscalingPolicy;
@@ -291,12 +324,11 @@ public AutoscalingPolicyName build() {
   /** Builder for projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}. */
   @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.")
   public static class ProjectRegionAutoscalingPolicyBuilder {
-
     private String project;
     private String region;
     private String autoscalingPolicy;
 
-    private ProjectRegionAutoscalingPolicyBuilder() {}
+    protected ProjectRegionAutoscalingPolicyBuilder() {}
 
     public String getProject() {
       return project;
@@ -329,35 +361,4 @@ public AutoscalingPolicyName build() {
       return new AutoscalingPolicyName(this);
     }
   }
-
-  @Override
-  public boolean equals(Object o) {
-    if (o == this) {
-      return true;
-    }
-    if (o != null || getClass() == o.getClass()) {
-      AutoscalingPolicyName that = (AutoscalingPolicyName) o;
-      return (Objects.equals(this.project, that.project))
-          && (Objects.equals(this.location, that.location))
-          && (Objects.equals(this.autoscalingPolicy, that.autoscalingPolicy))
-          && (Objects.equals(this.region, that.region));
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode() {
-    int h = 1;
-    h *= 1000003;
-    h ^= Objects.hashCode(fixedValue);
-    h *= 1000003;
-    h ^= Objects.hashCode(project);
-    h *= 1000003;
-    h ^= Objects.hashCode(location);
-    h *= 1000003;
-    h ^= Objects.hashCode(autoscalingPolicy);
-    h *= 1000003;
-    h ^= Objects.hashCode(region);
-    return h;
-  }
 }
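
A short sketch of how the two path patterns above behave; the project, region, and policy values are placeholders:

    import com.google.cloud.dataproc.v1.AutoscalingPolicyName;

    public class AutoscalingPolicyNameSketch {
      public static void main(String[] args) {
        // Region-qualified variant; all three values below are placeholders.
        AutoscalingPolicyName name =
            AutoscalingPolicyName.ofProjectRegionAutoscalingPolicyName(
                "my-project", "us-central1", "my-policy");
        String formatted = name.toString();
        // projects/my-project/regions/us-central1/autoscalingPolicies/my-policy

        // parse() matches either template and rebuilds the matching variant.
        AutoscalingPolicyName parsed = AutoscalingPolicyName.parse(formatted);
        System.out.println(parsed.getRegion()); // us-central1
      }
    }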
diff --git a/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/LocationName.java b/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/LocationName.java
index ac38238d..94a1ecfb 100644
--- a/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/LocationName.java
+++ b/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/LocationName.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *     https://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -23,19 +23,29 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
+import javax.annotation.Generated;
 
-/** AUTO-GENERATED DOCUMENTATION AND CLASS */
-@javax.annotation.Generated("by GAPIC protoc plugin")
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
+@Generated("by gapic-generator-java")
 public class LocationName implements ResourceName {
-
-  private static final PathTemplate PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_LOCATION =
       PathTemplate.createWithoutUrlEncoding("projects/{project}/locations/{location}");
-
   private volatile Map<String, String> fieldValuesMap;
-
   private final String project;
   private final String location;
 
+  @Deprecated
+  protected LocationName() {
+    project = null;
+    location = null;
+  }
+
+  private LocationName(Builder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    location = Preconditions.checkNotNull(builder.getLocation());
+  }
+
   public String getProject() {
     return project;
   }
@@ -52,11 +62,6 @@ public Builder toBuilder() {
     return new Builder(this);
   }
 
-  private LocationName(Builder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    location = Preconditions.checkNotNull(builder.getLocation());
-  }
-
   public static LocationName of(String project, String location) {
     return newBuilder().setProject(project).setLocation(location).build();
   }
@@ -70,7 +75,7 @@ public static LocationName parse(String formattedString) {
       return null;
     }
     Map<String, String> matchMap =
-        PATH_TEMPLATE.validatedMatch(
+        PROJECT_LOCATION.validatedMatch(
             formattedString, "LocationName.parse: formattedString not in valid format");
     return of(matchMap.get("project"), matchMap.get("location"));
   }
@@ -84,7 +89,7 @@ public static List<LocationName> parseList(List<String> formattedStrings) {
   }
 
   public static List<String> toStringList(List<LocationName> values) {
-    List<String> list = new ArrayList<String>(values.size());
+    List<String> list = new ArrayList<>(values.size());
     for (LocationName value : values) {
       if (value == null) {
         list.add("");
@@ -96,16 +101,21 @@ public static List<String> toStringList(List<LocationName> values) {
   }
 
   public static boolean isParsableFrom(String formattedString) {
-    return PATH_TEMPLATE.matches(formattedString);
+    return PROJECT_LOCATION.matches(formattedString);
   }
 
+  @Override
   public Map<String, String> getFieldValuesMap() {
     if (fieldValuesMap == null) {
       synchronized (this) {
         if (fieldValuesMap == null) {
           ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
-          fieldMapBuilder.put("project", project);
-          fieldMapBuilder.put("location", location);
+          if (project != null) {
+            fieldMapBuilder.put("project", project);
+          }
+          if (location != null) {
+            fieldMapBuilder.put("location", location);
+          }
           fieldValuesMap = fieldMapBuilder.build();
         }
       }
@@ -119,15 +129,39 @@ public String getFieldValue(String fieldName) {
 
   @Override
   public String toString() {
-    return PATH_TEMPLATE.instantiate("project", project, "location", location);
+    return PROJECT_LOCATION.instantiate("project", project, "location", location);
   }
 
-  /** Builder for LocationName. */
-  public static class Builder {
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (o != null && getClass() == o.getClass()) {
+      LocationName that = ((LocationName) o);
+      return Objects.equals(this.project, that.project)
+          && Objects.equals(this.location, that.location);
+    }
+    return false;
+  }
 
+  @Override
+  public int hashCode() {
+    int h = 1;
+    h *= 1000003;
+    h ^= Objects.hashCode(project);
+    h *= 1000003;
+    h ^= Objects.hashCode(location);
+    return h;
+  }
+
+  /** Builder for projects/{project}/locations/{location}. */
+  public static class Builder {
     private String project;
     private String location;
 
+    protected Builder() {}
+
     public String getProject() {
       return project;
     }
@@ -146,8 +180,6 @@ public Builder setLocation(String location) {
       return this;
     }
 
-    private Builder() {}
-
     private Builder(LocationName locationName) {
       project = locationName.project;
       location = locationName.location;
@@ -157,26 +189,4 @@ public LocationName build() {
       return new LocationName(this);
     }
   }
-
-  @Override
-  public boolean equals(Object o) {
-    if (o == this) {
-      return true;
-    }
-    if (o instanceof LocationName) {
-      LocationName that = (LocationName) o;
-      return (this.project.equals(that.project)) && (this.location.equals(that.location));
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode() {
-    int h = 1;
-    h *= 1000003;
-    h ^= project.hashCode();
-    h *= 1000003;
-    h ^= location.hashCode();
-    return h;
-  }
 }
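
A minimal sketch of the LocationName surface, with placeholder project and location values:

    import com.google.cloud.dataproc.v1.LocationName;

    public class LocationNameSketch {
      public static void main(String[] args) {
        LocationName name = LocationName.of("my-project", "us-central1"); // placeholders
        String formatted = name.toString(); // projects/my-project/locations/us-central1

        // isParsableFrom() guards parse() when the string's origin is unknown.
        if (LocationName.isParsableFrom(formatted)) {
          LocationName parsed = LocationName.parse(formatted);
          System.out.println(parsed.getLocation()); // us-central1
        }
      }
    }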
diff --git a/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegionName.java b/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegionName.java
index afb2e095..46a8e3cd 100644
--- a/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegionName.java
+++ b/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/RegionName.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *     https://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -23,19 +23,29 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
+import javax.annotation.Generated;
 
-/** AUTO-GENERATED DOCUMENTATION AND CLASS */
-@javax.annotation.Generated("by GAPIC protoc plugin")
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
+@Generated("by gapic-generator-java")
 public class RegionName implements ResourceName {
-
-  private static final PathTemplate PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_REGION =
       PathTemplate.createWithoutUrlEncoding("projects/{project}/regions/{region}");
-
   private volatile Map<String, String> fieldValuesMap;
-
   private final String project;
   private final String region;
 
+  @Deprecated
+  protected RegionName() {
+    project = null;
+    region = null;
+  }
+
+  private RegionName(Builder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    region = Preconditions.checkNotNull(builder.getRegion());
+  }
+
   public String getProject() {
     return project;
   }
@@ -52,11 +62,6 @@ public Builder toBuilder() {
     return new Builder(this);
   }
 
-  private RegionName(Builder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    region = Preconditions.checkNotNull(builder.getRegion());
-  }
-
   public static RegionName of(String project, String region) {
     return newBuilder().setProject(project).setRegion(region).build();
   }
@@ -70,7 +75,7 @@ public static RegionName parse(String formattedString) {
       return null;
     }
     Map<String, String> matchMap =
-        PATH_TEMPLATE.validatedMatch(
+        PROJECT_REGION.validatedMatch(
             formattedString, "RegionName.parse: formattedString not in valid format");
     return of(matchMap.get("project"), matchMap.get("region"));
   }
@@ -84,7 +89,7 @@ public static List<RegionName> parseList(List<String> formattedStrings) {
   }
 
   public static List<String> toStringList(List<RegionName> values) {
-    List<String> list = new ArrayList<String>(values.size());
+    List<String> list = new ArrayList<>(values.size());
     for (RegionName value : values) {
       if (value == null) {
         list.add("");
@@ -96,16 +101,21 @@ public static List<String> toStringList(List<RegionName> values) {
   }
 
   public static boolean isParsableFrom(String formattedString) {
-    return PATH_TEMPLATE.matches(formattedString);
+    return PROJECT_REGION.matches(formattedString);
   }
 
+  @Override
   public Map<String, String> getFieldValuesMap() {
     if (fieldValuesMap == null) {
       synchronized (this) {
         if (fieldValuesMap == null) {
           ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
-          fieldMapBuilder.put("project", project);
-          fieldMapBuilder.put("region", region);
+          if (project != null) {
+            fieldMapBuilder.put("project", project);
+          }
+          if (region != null) {
+            fieldMapBuilder.put("region", region);
+          }
           fieldValuesMap = fieldMapBuilder.build();
         }
       }
@@ -119,15 +129,38 @@ public String getFieldValue(String fieldName) {
 
   @Override
   public String toString() {
-    return PATH_TEMPLATE.instantiate("project", project, "region", region);
+    return PROJECT_REGION.instantiate("project", project, "region", region);
   }
 
-  /** Builder for RegionName. */
-  public static class Builder {
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (o != null && getClass() == o.getClass()) {
+      RegionName that = ((RegionName) o);
+      return Objects.equals(this.project, that.project) && Objects.equals(this.region, that.region);
+    }
+    return false;
+  }
 
+  @Override
+  public int hashCode() {
+    int h = 1;
+    h *= 1000003;
+    h ^= Objects.hashCode(project);
+    h *= 1000003;
+    h ^= Objects.hashCode(region);
+    return h;
+  }
+
+  /** Builder for projects/{project}/regions/{region}. */
+  public static class Builder {
     private String project;
     private String region;
 
+    protected Builder() {}
+
     public String getProject() {
       return project;
     }
@@ -146,8 +179,6 @@ public Builder setRegion(String region) {
       return this;
     }
 
-    private Builder() {}
-
     private Builder(RegionName regionName) {
       project = regionName.project;
       region = regionName.region;
@@ -157,26 +188,4 @@ public RegionName build() {
       return new RegionName(this);
     }
   }
-
-  @Override
-  public boolean equals(Object o) {
-    if (o == this) {
-      return true;
-    }
-    if (o instanceof RegionName) {
-      RegionName that = (RegionName) o;
-      return (this.project.equals(that.project)) && (this.region.equals(that.region));
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode() {
-    int h = 1;
-    h *= 1000003;
-    h ^= project.hashCode();
-    h *= 1000003;
-    h ^= region.hashCode();
-    return h;
-  }
 }
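
For reference, a minimal sketch of how the regenerated RegionName surface is
used (the project and region values are illustrative placeholders, not values
from this patch):

    RegionName name = RegionName.of("my-project", "us-central1");
    String formatted = name.toString();  // "projects/my-project/regions/us-central1"

    // parse() returns null for an empty string and throws ValidationException
    // for a malformed one; a successful round trip compares equal.
    RegionName parsed = RegionName.parse(formatted);
    assert name.equals(parsed);
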
diff --git a/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateName.java b/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateName.java
index 167a8a57..b471a5b9 100644
--- a/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateName.java
+++ b/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplateName.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *     https://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -26,29 +26,48 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import javax.annotation.Generated;
 
-/** AUTO-GENERATED DOCUMENTATION AND CLASS */
-@javax.annotation.Generated("by GAPIC protoc plugin")
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
+@Generated("by gapic-generator-java")
 public class WorkflowTemplateName implements ResourceName {
-
-  @Deprecated
-  protected WorkflowTemplateName() {}
-
-  private static final PathTemplate PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_REGION_WORKFLOW_TEMPLATE =
       PathTemplate.createWithoutUrlEncoding(
           "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}");
-  private static final PathTemplate PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_LOCATION_WORKFLOW_TEMPLATE =
       PathTemplate.createWithoutUrlEncoding(
           "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}");
-
   private volatile Map<String, String> fieldValuesMap;
   private PathTemplate pathTemplate;
   private String fixedValue;
+  private final String project;
+  private final String region;
+  private final String workflowTemplate;
+  private final String location;
+
+  @Deprecated
+  protected WorkflowTemplateName() {
+    project = null;
+    region = null;
+    workflowTemplate = null;
+    location = null;
+  }
+
+  private WorkflowTemplateName(Builder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    region = Preconditions.checkNotNull(builder.getRegion());
+    workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate());
+    location = null;
+    pathTemplate = PROJECT_REGION_WORKFLOW_TEMPLATE;
+  }
 
-  private String project;
-  private String region;
-  private String workflowTemplate;
-  private String location;
+  private WorkflowTemplateName(ProjectLocationWorkflowTemplateBuilder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    location = Preconditions.checkNotNull(builder.getLocation());
+    workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate());
+    region = null;
+    pathTemplate = PROJECT_LOCATION_WORKFLOW_TEMPLATE;
+  }
 
   public String getProject() {
     return project;
@@ -66,20 +85,6 @@ public String getLocation() {
     return location;
   }
 
-  private WorkflowTemplateName(Builder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    region = Preconditions.checkNotNull(builder.getRegion());
-    workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate());
-    pathTemplate = PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE;
-  }
-
-  private WorkflowTemplateName(ProjectLocationWorkflowTemplateBuilder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    location = Preconditions.checkNotNull(builder.getLocation());
-    workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate());
-    pathTemplate = PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE;
-  }
-
   public static Builder newBuilder() {
     return new Builder();
   }
@@ -99,7 +104,7 @@ public Builder toBuilder() {
   }
 
   public static WorkflowTemplateName of(String project, String region, String workflowTemplate) {
-    return newProjectRegionWorkflowTemplateBuilder()
+    return newBuilder()
         .setProject(project)
         .setRegion(region)
         .setWorkflowTemplate(workflowTemplate)
@@ -109,7 +114,7 @@ public static WorkflowTemplateName of(String project, String region, String work
   @BetaApi("The static create methods are not stable yet and may be changed in the future.")
   public static WorkflowTemplateName ofProjectRegionWorkflowTemplateName(
       String project, String region, String workflowTemplate) {
-    return newProjectRegionWorkflowTemplateBuilder()
+    return newBuilder()
         .setProject(project)
         .setRegion(region)
         .setWorkflowTemplate(workflowTemplate)
@@ -161,18 +166,17 @@ public static WorkflowTemplateName parse(String formattedString) {
     if (formattedString.isEmpty()) {
       return null;
     }
-    if (PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.matches(formattedString)) {
-      Map<String, String> matchMap =
-          PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.match(formattedString);
+    if (PROJECT_REGION_WORKFLOW_TEMPLATE.matches(formattedString)) {
+      Map<String, String> matchMap = PROJECT_REGION_WORKFLOW_TEMPLATE.match(formattedString);
       return ofProjectRegionWorkflowTemplateName(
           matchMap.get("project"), matchMap.get("region"), matchMap.get("workflow_template"));
-    } else if (PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.matches(formattedString)) {
-      Map<String, String> matchMap =
-          PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.match(formattedString);
+    } else if (PROJECT_LOCATION_WORKFLOW_TEMPLATE.matches(formattedString)) {
+      Map<String, String> matchMap = PROJECT_LOCATION_WORKFLOW_TEMPLATE.match(formattedString);
       return ofProjectLocationWorkflowTemplateName(
           matchMap.get("project"), matchMap.get("location"), matchMap.get("workflow_template"));
     }
-    throw new ValidationException("JobName.parse: formattedString not in valid format");
+    throw new ValidationException(
+        "WorkflowTemplateName.parse: formattedString not in valid format");
   }
 
  public static List<WorkflowTemplateName> parseList(List<String> formattedStrings) {
@@ -196,8 +200,8 @@ public static List<String> toStringList(List<WorkflowTemplateName> values) {
   }
 
   public static boolean isParsableFrom(String formattedString) {
-    return PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.matches(formattedString)
-        || PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.matches(formattedString);
+    return PROJECT_REGION_WORKFLOW_TEMPLATE.matches(formattedString)
+        || PROJECT_LOCATION_WORKFLOW_TEMPLATE.matches(formattedString);
   }
 
   @Override
@@ -234,9 +238,39 @@ public String toString() {
     return fixedValue != null ? fixedValue : pathTemplate.instantiate(getFieldValuesMap());
   }
 
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (o != null && getClass() == o.getClass()) {
+      WorkflowTemplateName that = ((WorkflowTemplateName) o);
+      return Objects.equals(this.project, that.project)
+          && Objects.equals(this.region, that.region)
+          && Objects.equals(this.workflowTemplate, that.workflowTemplate)
+          && Objects.equals(this.location, that.location);
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    int h = 1;
+    h *= 1000003;
+    h ^= Objects.hashCode(fixedValue);
+    h *= 1000003;
+    h ^= Objects.hashCode(project);
+    h *= 1000003;
+    h ^= Objects.hashCode(region);
+    h *= 1000003;
+    h ^= Objects.hashCode(workflowTemplate);
+    h *= 1000003;
+    h ^= Objects.hashCode(location);
+    return h;
+  }
+
   /** Builder for projects/{project}/regions/{region}/workflowTemplates/{workflow_template}. */
   public static class Builder {
-
     private String project;
     private String region;
     private String workflowTemplate;
@@ -272,9 +306,8 @@ public Builder setWorkflowTemplate(String workflowTemplate) {
 
     private Builder(WorkflowTemplateName workflowTemplateName) {
       Preconditions.checkArgument(
-          workflowTemplateName.pathTemplate == PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE,
-          "toBuilder is only supported when WorkflowTemplateName has the pattern of "
-              + "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}.");
+          Objects.equals(workflowTemplateName.pathTemplate, PROJECT_REGION_WORKFLOW_TEMPLATE),
+          "toBuilder is only supported when WorkflowTemplateName has the pattern of projects/{project}/regions/{region}/workflowTemplates/{workflow_template}");
       project = workflowTemplateName.project;
       region = workflowTemplateName.region;
       workflowTemplate = workflowTemplateName.workflowTemplate;
@@ -288,12 +321,11 @@ public WorkflowTemplateName build() {
   /** Builder for projects/{project}/locations/{location}/workflowTemplates/{workflow_template}. */
   @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.")
   public static class ProjectLocationWorkflowTemplateBuilder {
-
     private String project;
     private String location;
     private String workflowTemplate;
 
-    private ProjectLocationWorkflowTemplateBuilder() {}
+    protected ProjectLocationWorkflowTemplateBuilder() {}
 
     public String getProject() {
       return project;
@@ -326,35 +358,4 @@ public WorkflowTemplateName build() {
       return new WorkflowTemplateName(this);
     }
   }
-
-  @Override
-  public boolean equals(Object o) {
-    if (o == this) {
-      return true;
-    }
-    if (o != null || getClass() == o.getClass()) {
-      WorkflowTemplateName that = (WorkflowTemplateName) o;
-      return (Objects.equals(this.project, that.project))
-          && (Objects.equals(this.region, that.region))
-          && (Objects.equals(this.workflowTemplate, that.workflowTemplate))
-          && (Objects.equals(this.location, that.location));
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode() {
-    int h = 1;
-    h *= 1000003;
-    h ^= Objects.hashCode(fixedValue);
-    h *= 1000003;
-    h ^= Objects.hashCode(project);
-    h *= 1000003;
-    h ^= Objects.hashCode(region);
-    h *= 1000003;
-    h ^= Objects.hashCode(workflowTemplate);
-    h *= 1000003;
-    h ^= Objects.hashCode(location);
-    return h;
-  }
 }
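
WorkflowTemplateName is multi-pattern, so it keeps per-pattern factories next
to the default ones. A short sketch of the distinction, using only methods
visible in this diff (placeholder values):

    // Default pattern: projects/{project}/regions/{region}/workflowTemplates/{workflow_template}
    WorkflowTemplateName byRegion =
        WorkflowTemplateName.of("my-project", "us-central1", "my-template");

    // Location-based pattern via the @BetaApi per-pattern factory:
    WorkflowTemplateName byLocation =
        WorkflowTemplateName.ofProjectLocationWorkflowTemplateName(
            "my-project", "us-central1", "my-template");

    // parse() dispatches on whichever path template matches; toBuilder() is
    // only supported for the region-based pattern (see the Preconditions
    // check above) and throws for byLocation.
    WorkflowTemplateName parsed = WorkflowTemplateName.parse(byRegion.toString());
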
diff --git a/proto-google-cloud-dataproc-v1beta2/clirr-ignored-differences.xml b/proto-google-cloud-dataproc-v1beta2/clirr-ignored-differences.xml
index afdef514..e4109312 100644
--- a/proto-google-cloud-dataproc-v1beta2/clirr-ignored-differences.xml
+++ b/proto-google-cloud-dataproc-v1beta2/clirr-ignored-differences.xml
@@ -16,4 +16,26 @@
     <className>com/google/cloud/dataproc/v1beta2/*OrBuilder</className>
     <method>boolean has*(*)</method>
   </difference>
+
+  <!-- The DOCKER and FLINK values were removed from the Component enum. -->
+  <difference>
+    <differenceType>6001</differenceType>
+    <className>com/google/cloud/dataproc/v1beta2/Component</className>
+    <field>DOCKER</field>
+  </difference>
+  <difference>
+    <differenceType>6011</differenceType>
+    <className>com/google/cloud/dataproc/v1beta2/Component</className>
+    <field>DOCKER_VALUE</field>
+  </difference>
+  <difference>
+    <differenceType>6001</differenceType>
+    <className>com/google/cloud/dataproc/v1beta2/Component</className>
+    <field>FLINK</field>
+  </difference>
+  <difference>
+    <differenceType>6011</differenceType>
+    <className>com/google/cloud/dataproc/v1beta2/Component</className>
+    <field>FLINK_VALUE</field>
+  </difference>
 </differences>
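
The entries above use clirr codes 6001 and 6011, which mark removed fields and
removed constant fields respectively. A hedged sketch of the caller-side
impact, assuming Component is consumed through SoftwareConfig's repeated
optional_components field as in earlier releases:

    // No longer compiles after this change:
    // SoftwareConfig.newBuilder().addOptionalComponents(Component.DOCKER).build();

    // DOCKER and FLINK are gone from v1beta2; HBASE is the newly added value.
    SoftwareConfig config =
        SoftwareConfig.newBuilder().addOptionalComponents(Component.HBASE).build();
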
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyName.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyName.java
index a7b158d2..a04de167 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyName.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyName.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- *     https://www.apache.org/licenses/LICENSE-2.0
+ *      https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -26,29 +26,48 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import javax.annotation.Generated;
 
-/** AUTO-GENERATED DOCUMENTATION AND CLASS */
-@javax.annotation.Generated("by GAPIC protoc plugin")
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
+@Generated("by gapic-generator-java")
 public class AutoscalingPolicyName implements ResourceName {
-
-  @Deprecated
-  protected AutoscalingPolicyName() {}
-
-  private static final PathTemplate PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_LOCATION_AUTOSCALING_POLICY =
       PathTemplate.createWithoutUrlEncoding(
           "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}");
-  private static final PathTemplate PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_REGION_AUTOSCALING_POLICY =
       PathTemplate.createWithoutUrlEncoding(
           "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}");
-
   private volatile Map<String, String> fieldValuesMap;
   private PathTemplate pathTemplate;
   private String fixedValue;
+  private final String project;
+  private final String location;
+  private final String autoscalingPolicy;
+  private final String region;
+
+  @Deprecated
+  protected AutoscalingPolicyName() {
+    project = null;
+    location = null;
+    autoscalingPolicy = null;
+    region = null;
+  }
+
+  private AutoscalingPolicyName(Builder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    location = Preconditions.checkNotNull(builder.getLocation());
+    autoscalingPolicy = Preconditions.checkNotNull(builder.getAutoscalingPolicy());
+    region = null;
+    pathTemplate = PROJECT_LOCATION_AUTOSCALING_POLICY;
+  }
 
-  private String project;
-  private String location;
-  private String autoscalingPolicy;
-  private String region;
+  private AutoscalingPolicyName(ProjectRegionAutoscalingPolicyBuilder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    region = Preconditions.checkNotNull(builder.getRegion());
+    autoscalingPolicy = Preconditions.checkNotNull(builder.getAutoscalingPolicy());
+    location = null;
+    pathTemplate = PROJECT_REGION_AUTOSCALING_POLICY;
+  }
 
   public String getProject() {
     return project;
@@ -66,20 +85,6 @@ public String getRegion() {
     return region;
   }
 
-  private AutoscalingPolicyName(Builder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    location = Preconditions.checkNotNull(builder.getLocation());
-    autoscalingPolicy = Preconditions.checkNotNull(builder.getAutoscalingPolicy());
-    pathTemplate = PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE;
-  }
-
-  private AutoscalingPolicyName(ProjectRegionAutoscalingPolicyBuilder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    region = Preconditions.checkNotNull(builder.getRegion());
-    autoscalingPolicy = Preconditions.checkNotNull(builder.getAutoscalingPolicy());
-    pathTemplate = PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE;
-  }
-
   public static Builder newBuilder() {
     return new Builder();
   }
@@ -100,7 +105,7 @@ public Builder toBuilder() {
 
   public static AutoscalingPolicyName of(
       String project, String location, String autoscalingPolicy) {
-    return newProjectLocationAutoscalingPolicyBuilder()
+    return newBuilder()
         .setProject(project)
         .setLocation(location)
         .setAutoscalingPolicy(autoscalingPolicy)
@@ -110,7 +115,7 @@ public static AutoscalingPolicyName of(
   @BetaApi("The static create methods are not stable yet and may be changed in the future.")
   public static AutoscalingPolicyName ofProjectLocationAutoscalingPolicyName(
       String project, String location, String autoscalingPolicy) {
-    return newProjectLocationAutoscalingPolicyBuilder()
+    return newBuilder()
         .setProject(project)
         .setLocation(location)
         .setAutoscalingPolicy(autoscalingPolicy)
@@ -162,18 +167,17 @@ public static AutoscalingPolicyName parse(String formattedString) {
     if (formattedString.isEmpty()) {
       return null;
     }
-    if (PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE.matches(formattedString)) {
-      Map<String, String> matchMap =
-          PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE.match(formattedString);
+    if (PROJECT_LOCATION_AUTOSCALING_POLICY.matches(formattedString)) {
+      Map<String, String> matchMap = PROJECT_LOCATION_AUTOSCALING_POLICY.match(formattedString);
       return ofProjectLocationAutoscalingPolicyName(
           matchMap.get("project"), matchMap.get("location"), matchMap.get("autoscaling_policy"));
-    } else if (PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE.matches(formattedString)) {
-      Map<String, String> matchMap =
-          PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE.match(formattedString);
+    } else if (PROJECT_REGION_AUTOSCALING_POLICY.matches(formattedString)) {
+      Map<String, String> matchMap = PROJECT_REGION_AUTOSCALING_POLICY.match(formattedString);
       return ofProjectRegionAutoscalingPolicyName(
           matchMap.get("project"), matchMap.get("region"), matchMap.get("autoscaling_policy"));
     }
-    throw new ValidationException("JobName.parse: formattedString not in valid format");
+    throw new ValidationException(
+        "AutoscalingPolicyName.parse: formattedString not in valid format");
   }
 
  public static List<AutoscalingPolicyName> parseList(List<String> formattedStrings) {
@@ -197,8 +201,8 @@ public static List<String> toStringList(List<AutoscalingPolicyName> values) {
   }
 
   public static boolean isParsableFrom(String formattedString) {
-    return PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE.matches(formattedString)
-        || PROJECT_REGION_AUTOSCALING_POLICY_PATH_TEMPLATE.matches(formattedString);
+    return PROJECT_LOCATION_AUTOSCALING_POLICY.matches(formattedString)
+        || PROJECT_REGION_AUTOSCALING_POLICY.matches(formattedString);
   }
 
   @Override
@@ -235,11 +239,41 @@ public String toString() {
     return fixedValue != null ? fixedValue : pathTemplate.instantiate(getFieldValuesMap());
   }
 
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (o != null && getClass() == o.getClass()) {
+      AutoscalingPolicyName that = ((AutoscalingPolicyName) o);
+      return Objects.equals(this.project, that.project)
+          && Objects.equals(this.location, that.location)
+          && Objects.equals(this.autoscalingPolicy, that.autoscalingPolicy)
+          && Objects.equals(this.region, that.region);
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    int h = 1;
+    h *= 1000003;
+    h ^= Objects.hashCode(fixedValue);
+    h *= 1000003;
+    h ^= Objects.hashCode(project);
+    h *= 1000003;
+    h ^= Objects.hashCode(location);
+    h *= 1000003;
+    h ^= Objects.hashCode(autoscalingPolicy);
+    h *= 1000003;
+    h ^= Objects.hashCode(region);
+    return h;
+  }
+
   /**
    * Builder for projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}.
    */
   public static class Builder {
-
     private String project;
     private String location;
     private String autoscalingPolicy;
@@ -275,9 +309,8 @@ public Builder setAutoscalingPolicy(String autoscalingPolicy) {
 
     private Builder(AutoscalingPolicyName autoscalingPolicyName) {
       Preconditions.checkArgument(
-          autoscalingPolicyName.pathTemplate == PROJECT_LOCATION_AUTOSCALING_POLICY_PATH_TEMPLATE,
-          "toBuilder is only supported when AutoscalingPolicyName has the pattern of "
-              + "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}.");
+          Objects.equals(autoscalingPolicyName.pathTemplate, PROJECT_LOCATION_AUTOSCALING_POLICY),
+          "toBuilder is only supported when AutoscalingPolicyName has the pattern of projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}");
       project = autoscalingPolicyName.project;
       location = autoscalingPolicyName.location;
       autoscalingPolicy = autoscalingPolicyName.autoscalingPolicy;
@@ -291,12 +324,11 @@ public AutoscalingPolicyName build() {
   /** Builder for projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}. */
   @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.")
   public static class ProjectRegionAutoscalingPolicyBuilder {
-
     private String project;
     private String region;
     private String autoscalingPolicy;
 
-    private ProjectRegionAutoscalingPolicyBuilder() {}
+    protected ProjectRegionAutoscalingPolicyBuilder() {}
 
     public String getProject() {
       return project;
@@ -329,35 +361,4 @@ public AutoscalingPolicyName build() {
       return new AutoscalingPolicyName(this);
     }
   }
-
-  @Override
-  public boolean equals(Object o) {
-    if (o == this) {
-      return true;
-    }
-    if (o != null || getClass() == o.getClass()) {
-      AutoscalingPolicyName that = (AutoscalingPolicyName) o;
-      return (Objects.equals(this.project, that.project))
-          && (Objects.equals(this.location, that.location))
-          && (Objects.equals(this.autoscalingPolicy, that.autoscalingPolicy))
-          && (Objects.equals(this.region, that.region));
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode() {
-    int h = 1;
-    h *= 1000003;
-    h ^= Objects.hashCode(fixedValue);
-    h *= 1000003;
-    h ^= Objects.hashCode(project);
-    h *= 1000003;
-    h ^= Objects.hashCode(location);
-    h *= 1000003;
-    h ^= Objects.hashCode(autoscalingPolicy);
-    h *= 1000003;
-    h ^= Objects.hashCode(region);
-    return h;
-  }
 }
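
Like WorkflowTemplateName, AutoscalingPolicyName is multi-pattern; here the
default pattern is location-based and the region-based one is secondary. A
small sketch using only methods shown above (placeholder values):

    AutoscalingPolicyName byLocation =
        AutoscalingPolicyName.of("my-project", "us-central1", "my-policy");
    AutoscalingPolicyName byRegion =
        AutoscalingPolicyName.ofProjectRegionAutoscalingPolicyName(
            "my-project", "us-central1", "my-policy");

    // The two patterns render to different resource strings, so the names are
    // not equal even with identical components.
    assert !byLocation.toString().equals(byRegion.toString());
    assert !byLocation.equals(byRegion);
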
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/BasicYarnAutoscalingConfig.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/BasicYarnAutoscalingConfig.java
index 31f1bcb0..6b0f63cc 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/BasicYarnAutoscalingConfig.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/BasicYarnAutoscalingConfig.java
@@ -209,11 +209,14 @@ public com.google.protobuf.DurationOrBuilder getGracefulDecommissionTimeoutOrBui
    *
    *
   * <pre>
-   * Required. Fraction of average pending memory in the last cooldown period
-   * for which to add workers. A scale-up factor of 1.0 will result in scaling
-   * up so that there is no pending memory remaining after the update (more
-   * aggressive scaling). A scale-up factor closer to 0 will result in a smaller
-   * magnitude of scaling up (less aggressive scaling).
+   * Required. Fraction of average YARN pending memory in the last cooldown
+   * period for which to add workers. A scale-up factor of 1.0 will result in
+   * scaling up so that there is no pending memory remaining after the update
+   * (more aggressive scaling). A scale-up factor closer to 0 will result in a
+   * smaller magnitude of scaling up (less aggressive scaling). See [How
+   * autoscaling
+   * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+   * for more information.
    * Bounds: [0.0, 1.0].
   * </pre>
   *
@@ -232,11 +235,14 @@ public double getScaleUpFactor() {
   *
   *
   * <pre>
-   * Required. Fraction of average pending memory in the last cooldown period
-   * for which to remove workers. A scale-down factor of 1 will result in
+   * Required. Fraction of average YARN pending memory in the last cooldown
+   * period for which to remove workers. A scale-down factor of 1 will result in
    * scaling down so that there is no available memory remaining after the
    * update (more aggressive scaling). A scale-down factor of 0 disables
    * removing workers, which can be beneficial for autoscaling a single job.
+   * See [How autoscaling
+   * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+   * for more information.
    * Bounds: [0.0, 1.0].
   * </pre>
   *
@@ -954,11 +960,14 @@ public com.google.protobuf.DurationOrBuilder getGracefulDecommissionTimeoutOrBui
     *
     *
     * <pre>
-     * Required. Fraction of average pending memory in the last cooldown period
-     * for which to add workers. A scale-up factor of 1.0 will result in scaling
-     * up so that there is no pending memory remaining after the update (more
-     * aggressive scaling). A scale-up factor closer to 0 will result in a smaller
-     * magnitude of scaling up (less aggressive scaling).
+     * Required. Fraction of average YARN pending memory in the last cooldown
+     * period for which to add workers. A scale-up factor of 1.0 will result in
+     * scaling up so that there is no pending memory remaining after the update
+     * (more aggressive scaling). A scale-up factor closer to 0 will result in a
+     * smaller magnitude of scaling up (less aggressive scaling). See [How
+     * autoscaling
+     * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+     * for more information.
      * Bounds: [0.0, 1.0].
     * </pre>
     *
@@ -974,11 +983,14 @@ public double getScaleUpFactor() {
     *
     *
     * <pre>
-     * Required. Fraction of average pending memory in the last cooldown period
-     * for which to add workers. A scale-up factor of 1.0 will result in scaling
-     * up so that there is no pending memory remaining after the update (more
-     * aggressive scaling). A scale-up factor closer to 0 will result in a smaller
-     * magnitude of scaling up (less aggressive scaling).
+     * Required. Fraction of average YARN pending memory in the last cooldown
+     * period for which to add workers. A scale-up factor of 1.0 will result in
+     * scaling up so that there is no pending memory remaining after the update
+     * (more aggressive scaling). A scale-up factor closer to 0 will result in a
+     * smaller magnitude of scaling up (less aggressive scaling). See [How
+     * autoscaling
+     * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+     * for more information.
      * Bounds: [0.0, 1.0].
     * </pre>
     *
@@ -997,11 +1009,14 @@ public Builder setScaleUpFactor(double value) {
     *
     *
     * <pre>
-     * Required. Fraction of average pending memory in the last cooldown period
-     * for which to add workers. A scale-up factor of 1.0 will result in scaling
-     * up so that there is no pending memory remaining after the update (more
-     * aggressive scaling). A scale-up factor closer to 0 will result in a smaller
-     * magnitude of scaling up (less aggressive scaling).
+     * Required. Fraction of average YARN pending memory in the last cooldown
+     * period for which to add workers. A scale-up factor of 1.0 will result in
+     * scaling up so that there is no pending memory remaining after the update
+     * (more aggressive scaling). A scale-up factor closer to 0 will result in a
+     * smaller magnitude of scaling up (less aggressive scaling). See [How
+     * autoscaling
+     * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+     * for more information.
      * Bounds: [0.0, 1.0].
     * </pre>
     *
@@ -1021,11 +1036,14 @@ public Builder clearScaleUpFactor() {
     *
     *
     * <pre>
-     * Required. Fraction of average pending memory in the last cooldown period
-     * for which to remove workers. A scale-down factor of 1 will result in
+     * Required. Fraction of average YARN pending memory in the last cooldown
+     * period for which to remove workers. A scale-down factor of 1 will result in
      * scaling down so that there is no available memory remaining after the
      * update (more aggressive scaling). A scale-down factor of 0 disables
      * removing workers, which can be beneficial for autoscaling a single job.
+     * See [How autoscaling
+     * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+     * for more information.
      * Bounds: [0.0, 1.0].
     * </pre>
     *
@@ -1041,11 +1059,14 @@ public double getScaleDownFactor() {
     *
     *
     * <pre>
-     * Required. Fraction of average pending memory in the last cooldown period
-     * for which to remove workers. A scale-down factor of 1 will result in
+     * Required. Fraction of average YARN pending memory in the last cooldown
+     * period for which to remove workers. A scale-down factor of 1 will result in
      * scaling down so that there is no available memory remaining after the
      * update (more aggressive scaling). A scale-down factor of 0 disables
      * removing workers, which can be beneficial for autoscaling a single job.
+     * See [How autoscaling
+     * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+     * for more information.
      * Bounds: [0.0, 1.0].
     * </pre>
     *
@@ -1064,11 +1085,14 @@ public Builder setScaleDownFactor(double value) {
     *
     *
     * <pre>
-     * Required. Fraction of average pending memory in the last cooldown period
-     * for which to remove workers. A scale-down factor of 1 will result in
+     * Required. Fraction of average YARN pending memory in the last cooldown
+     * period for which to remove workers. A scale-down factor of 1 will result in
      * scaling down so that there is no available memory remaining after the
      * update (more aggressive scaling). A scale-down factor of 0 disables
      * removing workers, which can be beneficial for autoscaling a single job.
+     * See [How autoscaling
+     * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+     * for more information.
      * Bounds: [0.0, 1.0].
     * </pre>
     *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/BasicYarnAutoscalingConfigOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/BasicYarnAutoscalingConfigOrBuilder.java
index 25de99ad..53b3d8bd 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/BasicYarnAutoscalingConfigOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/BasicYarnAutoscalingConfigOrBuilder.java
@@ -80,11 +80,14 @@ public interface BasicYarnAutoscalingConfigOrBuilder
   *
   *
   * <pre>
-   * Required. Fraction of average pending memory in the last cooldown period
-   * for which to add workers. A scale-up factor of 1.0 will result in scaling
-   * up so that there is no pending memory remaining after the update (more
-   * aggressive scaling). A scale-up factor closer to 0 will result in a smaller
-   * magnitude of scaling up (less aggressive scaling).
+   * Required. Fraction of average YARN pending memory in the last cooldown
+   * period for which to add workers. A scale-up factor of 1.0 will result in
+   * scaling up so that there is no pending memory remaining after the update
+   * (more aggressive scaling). A scale-up factor closer to 0 will result in a
+   * smaller magnitude of scaling up (less aggressive scaling). See [How
+   * autoscaling
+   * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+   * for more information.
    * Bounds: [0.0, 1.0].
   * </pre>
   *
@@ -98,11 +101,14 @@ public interface BasicYarnAutoscalingConfigOrBuilder
   *
   *
   * <pre>
-   * Required. Fraction of average pending memory in the last cooldown period
-   * for which to remove workers. A scale-down factor of 1 will result in
+   * Required. Fraction of average YARN pending memory in the last cooldown
+   * period for which to remove workers. A scale-down factor of 1 will result in
    * scaling down so that there is no available memory remaining after the
    * update (more aggressive scaling). A scale-down factor of 0 disables
    * removing workers, which can be beneficial for autoscaling a single job.
+   * See [How autoscaling
+   * works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+   * for more information.
    * Bounds: [0.0, 1.0].
   * </pre>
   *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfig.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfig.java
index 1526c99a..4c00d031 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfig.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfig.java
@@ -39,6 +39,7 @@ private ClusterConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder
   private ClusterConfig() {
     configBucket_ = "";
+    tempBucket_ = "";
     initializationActions_ = java.util.Collections.emptyList();
   }
@@ -79,6 +80,13 @@ private ClusterConfig(
             configBucket_ = s;
             break;
           }
+          case 18:
+            {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              tempBucket_ = s;
+              break;
+            }
           case 66:
             {
               com.google.cloud.dataproc.v1beta2.GceClusterConfig.Builder subBuilder = null;
@@ -379,6 +387,67 @@ public com.google.protobuf.ByteString getConfigBucketBytes() {
     }
   }
 
+  public static final int TEMP_BUCKET_FIELD_NUMBER = 2;
+  private volatile java.lang.Object tempBucket_;
+  /**
+   *
+   *
+   * <pre>
+   * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+   * data, such as Spark and MapReduce history files. If you do not specify a
+   * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+   * EU) for your cluster's temp bucket according to the Compute Engine zone
+   * where your cluster is deployed, and then create and manage this
+   * project-level, per-location bucket. The default bucket has a TTL of 90
+   * days, but you can use any TTL (or none) if you specify a bucket.
+   * </pre>
+   *
+   * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+   *
+   * @return The tempBucket.
+   */
+  @java.lang.Override
+  public java.lang.String getTempBucket() {
+    java.lang.Object ref = tempBucket_;
+    if (ref instanceof java.lang.String) {
+      return (java.lang.String) ref;
+    } else {
+      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+      java.lang.String s = bs.toStringUtf8();
+      tempBucket_ = s;
+      return s;
+    }
+  }
+  /**
+   *
+   *
+   * <pre>
+   * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+   * data, such as Spark and MapReduce history files. If you do not specify a
+   * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+   * EU) for your cluster's temp bucket according to the Compute Engine zone
+   * where your cluster is deployed, and then create and manage this
+   * project-level, per-location bucket. The default bucket has a TTL of 90
+   * days, but you can use any TTL (or none) if you specify a bucket.
+   * </pre>
+   *
+   * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+   *
+   * @return The bytes for tempBucket.
+   */
+  @java.lang.Override
+  public com.google.protobuf.ByteString getTempBucketBytes() {
+    java.lang.Object ref = tempBucket_;
+    if (ref instanceof java.lang.String) {
+      com.google.protobuf.ByteString b =
+          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+      tempBucket_ = b;
+      return b;
+    } else {
+      return (com.google.protobuf.ByteString) ref;
+    }
+  }
+
   public static final int GCE_CLUSTER_CONFIG_FIELD_NUMBER = 8;
   private com.google.cloud.dataproc.v1beta2.GceClusterConfig gceClusterConfig_;
   /**
@@ -1083,10 +1152,10 @@ public com.google.cloud.dataproc.v1beta2.SecurityConfigOrBuilder getSecurityConf
   *
   *
   * <pre>
-   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-   * Setting this is considered mutually exclusive with Compute Engine-based
-   * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-   * `secondary_worker_config`, and `autoscaling_config`.
+   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+   * Kubernetes. Setting this is considered mutually exclusive with Compute
+   * Engine-based options such as `gce_cluster_config`, `master_config`,
+   * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
   * </pre>
   *
   *
@@ -1103,10 +1172,10 @@ public boolean hasGkeClusterConfig() {
   *
   *
   * <pre>
-   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-   * Setting this is considered mutually exclusive with Compute Engine-based
-   * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-   * `secondary_worker_config`, and `autoscaling_config`.
+   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+   * Kubernetes. Setting this is considered mutually exclusive with Compute
+   * Engine-based options such as `gce_cluster_config`, `master_config`,
+   * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
   * </pre>
   *
   *
@@ -1125,10 +1194,10 @@ public com.google.cloud.dataproc.v1beta2.GkeClusterConfig getGkeClusterConfig()
   *
   *
   * <pre>
-   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-   * Setting this is considered mutually exclusive with Compute Engine-based
-   * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-   * `secondary_worker_config`, and `autoscaling_config`.
+   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+   * Kubernetes. Setting this is considered mutually exclusive with Compute
+   * Engine-based options such as `gce_cluster_config`, `master_config`,
+   * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
   * </pre>
   *
   *
@@ -1158,6 +1227,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
     if (!getConfigBucketBytes().isEmpty()) {
       com.google.protobuf.GeneratedMessageV3.writeString(output, 1, configBucket_);
     }
+    if (!getTempBucketBytes().isEmpty()) {
+      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, tempBucket_);
+    }
     if (gceClusterConfig_ != null) {
       output.writeMessage(8, getGceClusterConfig());
     }
@@ -1206,6 +1278,9 @@ public int getSerializedSize() {
     if (!getConfigBucketBytes().isEmpty()) {
       size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, configBucket_);
     }
+    if (!getTempBucketBytes().isEmpty()) {
+      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, tempBucket_);
+    }
     if (gceClusterConfig_ != null) {
       size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, getGceClusterConfig());
     }
@@ -1262,6 +1337,7 @@ public boolean equals(final java.lang.Object obj) {
         (com.google.cloud.dataproc.v1beta2.ClusterConfig) obj;
     if (!getConfigBucket().equals(other.getConfigBucket())) return false;
+    if (!getTempBucket().equals(other.getTempBucket())) return false;
     if (hasGceClusterConfig() != other.hasGceClusterConfig()) return false;
     if (hasGceClusterConfig()) {
       if (!getGceClusterConfig().equals(other.getGceClusterConfig())) return false;
@@ -1320,6 +1396,8 @@ public int hashCode() {
     hash = (19 * hash) + getDescriptor().hashCode();
     hash = (37 * hash) + CONFIG_BUCKET_FIELD_NUMBER;
     hash = (53 * hash) + getConfigBucket().hashCode();
+    hash = (37 * hash) + TEMP_BUCKET_FIELD_NUMBER;
+    hash = (53 * hash) + getTempBucket().hashCode();
     if (hasGceClusterConfig()) {
       hash = (37 * hash) + GCE_CLUSTER_CONFIG_FIELD_NUMBER;
       hash = (53 * hash) + getGceClusterConfig().hashCode();
@@ -1517,6 +1595,8 @@ public Builder clear() {
       super.clear();
       configBucket_ = "";
+      tempBucket_ = "";
+
       if (gceClusterConfigBuilder_ == null) {
         gceClusterConfig_ = null;
       } else {
@@ -1618,6 +1698,7 @@ public com.google.cloud.dataproc.v1beta2.ClusterConfig buildPartial() {
           new com.google.cloud.dataproc.v1beta2.ClusterConfig(this);
       int from_bitField0_ = bitField0_;
       result.configBucket_ = configBucket_;
+      result.tempBucket_ = tempBucket_;
       if (gceClusterConfigBuilder_ == null) {
         result.gceClusterConfig_ = gceClusterConfig_;
       } else {
@@ -1736,6 +1817,10 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.ClusterConfig other)
         configBucket_ = other.configBucket_;
         onChanged();
       }
+      if (!other.getTempBucket().isEmpty()) {
+        tempBucket_ = other.tempBucket_;
+        onChanged();
+      }
       if (other.hasGceClusterConfig()) {
         mergeGceClusterConfig(other.getGceClusterConfig());
       }
@@ -1973,6 +2058,142 @@ public Builder setConfigBucketBytes(com.google.protobuf.ByteString value) {
       return this;
     }
 
+    private java.lang.Object tempBucket_ = "";
+    /**
+     *
+     *
+     * <pre>
+     * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+     * data, such as Spark and MapReduce history files. If you do not specify a
+     * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+     * EU) for your cluster's temp bucket according to the Compute Engine zone
+     * where your cluster is deployed, and then create and manage this
+     * project-level, per-location bucket. The default bucket has a TTL of 90
+     * days, but you can use any TTL (or none) if you specify a bucket.
+     * </pre>
+     *
+     * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+     *
+     * @return The tempBucket.
+     */
+    public java.lang.String getTempBucket() {
+      java.lang.Object ref = tempBucket_;
+      if (!(ref instanceof java.lang.String)) {
+        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        tempBucket_ = s;
+        return s;
+      } else {
+        return (java.lang.String) ref;
+      }
+    }
+    /**
+     *
+     *
+     * <pre>
+     * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+     * data, such as Spark and MapReduce history files. If you do not specify a
+     * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+     * EU) for your cluster's temp bucket according to the Compute Engine zone
+     * where your cluster is deployed, and then create and manage this
+     * project-level, per-location bucket. The default bucket has a TTL of 90
+     * days, but you can use any TTL (or none) if you specify a bucket.
+     * </pre>
+     *
+     * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+     *
+     * @return The bytes for tempBucket.
+     */
+    public com.google.protobuf.ByteString getTempBucketBytes() {
+      java.lang.Object ref = tempBucket_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+        tempBucket_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+    /**
+     *
+     *
+     * <pre>
+     * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+     * data, such as Spark and MapReduce history files. If you do not specify a
+     * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+     * EU) for your cluster's temp bucket according to the Compute Engine zone
+     * where your cluster is deployed, and then create and manage this
+     * project-level, per-location bucket. The default bucket has a TTL of 90
+     * days, but you can use any TTL (or none) if you specify a bucket.
+     * </pre>
+     *
+     * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+     *
+     * @param value The tempBucket to set.
+     * @return This builder for chaining.
+     */
+    public Builder setTempBucket(java.lang.String value) {
+      if (value == null) {
+        throw new NullPointerException();
+      }
+
+      tempBucket_ = value;
+      onChanged();
+      return this;
+    }
+    /**
+     *
+     *
+     * <pre>
+     * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+     * data, such as Spark and MapReduce history files. If you do not specify a
+     * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+     * EU) for your cluster's temp bucket according to the Compute Engine zone
+     * where your cluster is deployed, and then create and manage this
+     * project-level, per-location bucket. The default bucket has a TTL of 90
+     * days, but you can use any TTL (or none) if you specify a bucket.
+     * </pre>
+     *
+     * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+     *
+     * @return This builder for chaining.
+     */
+    public Builder clearTempBucket() {
+
+      tempBucket_ = getDefaultInstance().getTempBucket();
+      onChanged();
+      return this;
+    }
+    /**
+     *
+     *
+     * <pre>
+     * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+     * data, such as Spark and MapReduce history files. If you do not specify a
+     * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+     * EU) for your cluster's temp bucket according to the Compute Engine zone
+     * where your cluster is deployed, and then create and manage this
+     * project-level, per-location bucket. The default bucket has a TTL of 90
+     * days, but you can use any TTL (or none) if you specify a bucket.
+     * </pre>
+     *
+     * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+     *
+     * @param value The bytes for tempBucket to set.
+     * @return This builder for chaining.
+     */
+    public Builder setTempBucketBytes(com.google.protobuf.ByteString value) {
+      if (value == null) {
+        throw new NullPointerException();
+      }
+      checkByteStringIsUtf8(value);
+
+      tempBucket_ = value;
+      onChanged();
+      return this;
+    }
+
     private com.google.cloud.dataproc.v1beta2.GceClusterConfig gceClusterConfig_;
     private com.google.protobuf.SingleFieldBuilderV3<
             com.google.cloud.dataproc.v1beta2.GceClusterConfig,
@@ -4686,10 +4907,10 @@ public com.google.cloud.dataproc.v1beta2.SecurityConfigOrBuilder getSecurityConf
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
@@ -4705,10 +4926,10 @@ public boolean hasGkeClusterConfig() {
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
@@ -4730,10 +4951,10 @@ public com.google.cloud.dataproc.v1beta2.GkeClusterConfig getGkeClusterConfig()
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
@@ -4757,10 +4978,10 @@ public Builder setGkeClusterConfig(com.google.cloud.dataproc.v1beta2.GkeClusterC
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
@@ -4782,10 +5003,10 @@ public Builder setGkeClusterConfig(
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
@@ -4813,10 +5034,10 @@ public Builder mergeGkeClusterConfig(com.google.cloud.dataproc.v1beta2.GkeCluste
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
@@ -4838,10 +5059,10 @@ public Builder clearGkeClusterConfig() {
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
@@ -4857,10 +5078,10 @@ public com.google.cloud.dataproc.v1beta2.GkeClusterConfig.Builder getGkeClusterC
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
@@ -4881,10 +5102,10 @@ public com.google.cloud.dataproc.v1beta2.GkeClusterConfig.Builder getGkeClusterC
     *
     *
     * <pre>
-     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-     * Setting this is considered mutually exclusive with Compute Engine-based
-     * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-     * `secondary_worker_config`, and `autoscaling_config`.
+     * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+     * Kubernetes. Setting this is considered mutually exclusive with Compute
+     * Engine-based options such as `gce_cluster_config`, `master_config`,
+     * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
     * </pre>
     *
     *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java
index cd74b235..a4f0e6a9 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java
@@ -64,6 +64,43 @@ public interface ClusterConfigOrBuilder
    */
   com.google.protobuf.ByteString getConfigBucketBytes();
 
+  /**
+   *
+   *
+   * <pre>
+   * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+   * data, such as Spark and MapReduce history files. If you do not specify a
+   * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+   * EU) for your cluster's temp bucket according to the Compute Engine zone
+   * where your cluster is deployed, and then create and manage this
+   * project-level, per-location bucket. The default bucket has a TTL of 90
+   * days, but you can use any TTL (or none) if you specify a bucket.
+   * </pre>
+   *
+   * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+   *
+   * @return The tempBucket.
+   */
+  java.lang.String getTempBucket();
+
+  /**
+   *
+   *
+   * <pre>
+   * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs
+   * data, such as Spark and MapReduce history files. If you do not specify a
+   * temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or
+   * EU) for your cluster's temp bucket according to the Compute Engine zone
+   * where your cluster is deployed, and then create and manage this
+   * project-level, per-location bucket. The default bucket has a TTL of 90
+   * days, but you can use any TTL (or none) if you specify a bucket.
+   * </pre>
+   *
+   * <code>string temp_bucket = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
+   *
+   * @return The bytes for tempBucket.
+   */
+  com.google.protobuf.ByteString getTempBucketBytes();
+
   /**
    *
    *
@@ -613,10 +650,10 @@ public interface ClusterConfigOrBuilder
   *
   *
   * <pre>
-   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-   * Setting this is considered mutually exclusive with Compute Engine-based
-   * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-   * `secondary_worker_config`, and `autoscaling_config`.
+   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+   * Kubernetes. Setting this is considered mutually exclusive with Compute
+   * Engine-based options such as `gce_cluster_config`, `master_config`,
+   * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
    * 
* * @@ -630,10 +667,10 @@ public interface ClusterConfigOrBuilder * * *
-   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-   * Setting this is considered mutually exclusive with Compute Engine-based
-   * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-   * `secondary_worker_config`, and `autoscaling_config`.
+   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+   * Kubernetes. Setting this is considered mutually exclusive with Compute
+   * Engine-based options such as `gce_cluster_config`, `master_config`,
+   * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
    * 
* * @@ -647,10 +684,10 @@ public interface ClusterConfigOrBuilder * * *
-   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes.
-   * Setting this is considered mutually exclusive with Compute Engine-based
-   * options such as `gce_cluster_config`, `master_config`, `worker_config`,
-   * `secondary_worker_config`, and `autoscaling_config`.
+   * Optional. The Kubernetes Engine config for Dataproc clusters deployed to
+   * Kubernetes. Setting this is considered mutually exclusive with Compute
+   * Engine-based options such as `gce_cluster_config`, `master_config`,
+   * `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
    * 
* * diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterName.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterName.java new file mode 100644 index 00000000..244cc71b --- /dev/null +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterName.java @@ -0,0 +1,223 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.dataproc.v1beta2; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class ClusterName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_CLUSTER = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/clusters/{cluster}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String cluster; + + @Deprecated + protected ClusterName() { + project = null; + location = null; + cluster = null; + } + + private ClusterName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + cluster = Preconditions.checkNotNull(builder.getCluster()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCluster() { + return cluster; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static ClusterName of(String project, String location, String cluster) { + return newBuilder().setProject(project).setLocation(location).setCluster(cluster).build(); + } + + public static String format(String project, String location, String cluster) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setCluster(cluster) + .build() + .toString(); + } + + public static ClusterName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION_CLUSTER.validatedMatch( + formattedString, "ClusterName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("cluster")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (ClusterName 
value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_CLUSTER.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (cluster != null) { + fieldMapBuilder.put("cluster", cluster); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_CLUSTER.instantiate( + "project", project, "location", location, "cluster", cluster); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ClusterName that = ((ClusterName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.cluster, that.cluster); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(cluster); + return h; + } + + /** Builder for projects/{project}/locations/{location}/clusters/{cluster}. */ + public static class Builder { + private String project; + private String location; + private String cluster; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getCluster() { + return cluster; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setCluster(String cluster) { + this.cluster = cluster; + return this; + } + + private Builder(ClusterName clusterName) { + project = clusterName.project; + location = clusterName.location; + cluster = clusterName.cluster; + } + + public ClusterName build() { + return new ClusterName(this); + } + } +} diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClustersProto.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClustersProto.java index 9f45a968..ecaed7cf 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClustersProto.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClustersProto.java @@ -193,207 +193,217 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "\n\014cluster_uuid\030\006 \001(\tB\003\340A\003\022C\n\007metrics\030\t \001" + "(\0132-.google.cloud.dataproc.v1beta2.Clust" + "erMetricsB\003\340A\003\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001" - + "(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\201\010\n\rClusterConfig\022" - + "\032\n\rconfig_bucket\030\001 \001(\tB\003\340A\001\022P\n\022gce_clust" - + "er_config\030\010 \001(\0132/.google.cloud.dataproc." 
- + "v1beta2.GceClusterConfigB\003\340A\001\022N\n\rmaster_" - + "config\030\t \001(\01322.google.cloud.dataproc.v1b" - + "eta2.InstanceGroupConfigB\003\340A\001\022N\n\rworker_" - + "config\030\n \001(\01322.google.cloud.dataproc.v1b" - + "eta2.InstanceGroupConfigB\003\340A\001\022X\n\027seconda" - + "ry_worker_config\030\014 \001(\01322.google.cloud.da" - + "taproc.v1beta2.InstanceGroupConfigB\003\340A\001\022" - + "K\n\017software_config\030\r \001(\0132-.google.cloud." - + "dataproc.v1beta2.SoftwareConfigB\003\340A\001\022M\n\020" - + "lifecycle_config\030\016 \001(\0132..google.cloud.da" - + "taproc.v1beta2.LifecycleConfigB\003\340A\001\022\\\n\026i" - + "nitialization_actions\030\013 \003(\01327.google.clo" - + "ud.dataproc.v1beta2.NodeInitializationAc" - + "tionB\003\340A\001\022O\n\021encryption_config\030\017 \001(\0132/.g" - + "oogle.cloud.dataproc.v1beta2.EncryptionC" - + "onfigB\003\340A\001\022Q\n\022autoscaling_config\030\020 \001(\01320" - + ".google.cloud.dataproc.v1beta2.Autoscali" - + "ngConfigB\003\340A\001\022K\n\017endpoint_config\030\021 \001(\0132-" - + ".google.cloud.dataproc.v1beta2.EndpointC" - + "onfigB\003\340A\001\022K\n\017security_config\030\022 \001(\0132-.go" - + "ogle.cloud.dataproc.v1beta2.SecurityConf" - + "igB\003\340A\001\022P\n\022gke_cluster_config\030\023 \001(\0132/.go" - + "ogle.cloud.dataproc.v1beta2.GkeClusterCo" - + "nfigB\003\340A\001\"\230\002\n\020GkeClusterConfig\022|\n namesp" - + "aced_gke_deployment_target\030\001 \001(\0132M.googl" - + "e.cloud.dataproc.v1beta2.GkeClusterConfi" - + "g.NamespacedGkeDeploymentTargetB\003\340A\001\032\205\001\n" - + "\035NamespacedGkeDeploymentTarget\022D\n\022target" - + "_gke_cluster\030\001 \001(\tB(\340A\001\372A\"\n container.go" - + "ogleapis.com/Cluster\022\036\n\021cluster_namespac" - + "e\030\002 \001(\tB\003\340A\001\"\277\001\n\016EndpointConfig\022U\n\nhttp_" - + "ports\030\001 \003(\0132<.google.cloud.dataproc.v1be" - + "ta2.EndpointConfig.HttpPortsEntryB\003\340A\003\022$" - + "\n\027enable_http_port_access\030\002 \001(\010B\003\340A\001\0320\n\016" - + "HttpPortsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001" - + "(\t:\0028\001\",\n\021AutoscalingConfig\022\027\n\npolicy_ur" - + "i\030\001 \001(\tB\003\340A\001\"4\n\020EncryptionConfig\022 \n\023gce_" - + "pd_kms_key_name\030\001 \001(\tB\003\340A\001\"\251\003\n\020GceCluste" - + "rConfig\022\025\n\010zone_uri\030\001 \001(\tB\003\340A\001\022\030\n\013networ" - + "k_uri\030\002 \001(\tB\003\340A\001\022\033\n\016subnetwork_uri\030\006 \001(\t" - + "B\003\340A\001\022\035\n\020internal_ip_only\030\007 \001(\010B\003\340A\001\022\034\n\017" - + "service_account\030\010 \001(\tB\003\340A\001\022#\n\026service_ac" - + "count_scopes\030\003 \003(\tB\003\340A\001\022\014\n\004tags\030\004 \003(\t\022O\n" - + "\010metadata\030\005 \003(\0132=.google.cloud.dataproc." 
- + "v1beta2.GceClusterConfig.MetadataEntry\022U" - + "\n\024reservation_affinity\030\013 \001(\01322.google.cl" - + "oud.dataproc.v1beta2.ReservationAffinity" - + "B\003\340A\001\032/\n\rMetadataEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005v" - + "alue\030\002 \001(\t:\0028\001\"\244\003\n\023InstanceGroupConfig\022\032" - + "\n\rnum_instances\030\001 \001(\005B\003\340A\001\022\033\n\016instance_n" - + "ames\030\002 \003(\tB\003\340A\003\022\026\n\timage_uri\030\003 \001(\tB\003\340A\001\022" - + "\035\n\020machine_type_uri\030\004 \001(\tB\003\340A\001\022C\n\013disk_c" - + "onfig\030\005 \001(\0132).google.cloud.dataproc.v1be" - + "ta2.DiskConfigB\003\340A\001\022\033\n\016is_preemptible\030\006 " - + "\001(\010B\003\340A\003\022T\n\024managed_group_config\030\007 \001(\01321" - + ".google.cloud.dataproc.v1beta2.ManagedGr" - + "oupConfigB\003\340A\003\022K\n\014accelerators\030\010 \003(\01320.g" - + "oogle.cloud.dataproc.v1beta2.Accelerator" - + "ConfigB\003\340A\001\022\030\n\020min_cpu_platform\030\t \001(\t\"c\n" - + "\022ManagedGroupConfig\022#\n\026instance_template" - + "_name\030\001 \001(\tB\003\340A\003\022(\n\033instance_group_manag" - + "er_name\030\002 \001(\tB\003\340A\003\"L\n\021AcceleratorConfig\022" - + "\034\n\024accelerator_type_uri\030\001 \001(\t\022\031\n\021acceler" - + "ator_count\030\002 \001(\005\"a\n\nDiskConfig\022\033\n\016boot_d" - + "isk_type\030\003 \001(\tB\003\340A\001\022\036\n\021boot_disk_size_gb" - + "\030\001 \001(\005B\003\340A\001\022\026\n\016num_local_ssds\030\002 \001(\005\"\203\002\n\017" - + "LifecycleConfig\0227\n\017idle_delete_ttl\030\001 \001(\013" - + "2\031.google.protobuf.DurationB\003\340A\001\022;\n\020auto" - + "_delete_time\030\002 \001(\0132\032.google.protobuf.Tim" - + "estampB\003\340A\001H\000\0229\n\017auto_delete_ttl\030\003 \001(\0132\031" - + ".google.protobuf.DurationB\003\340A\001H\000\0228\n\017idle" - + "_start_time\030\004 \001(\0132\032.google.protobuf.Time" - + "stampB\003\340A\003B\005\n\003ttl\"X\n\016SecurityConfig\022F\n\017k" - + "erberos_config\030\001 \001(\0132-.google.cloud.data" - + "proc.v1beta2.KerberosConfig\"\220\004\n\016Kerberos" - + "Config\022\034\n\017enable_kerberos\030\001 \001(\010B\003\340A\001\022(\n\033" - + "root_principal_password_uri\030\002 \001(\tB\003\340A\002\022\030" - + "\n\013kms_key_uri\030\003 \001(\tB\003\340A\002\022\031\n\014keystore_uri" - + "\030\004 \001(\tB\003\340A\001\022\033\n\016truststore_uri\030\005 \001(\tB\003\340A\001" - + "\022\"\n\025keystore_password_uri\030\006 \001(\tB\003\340A\001\022\035\n\020" - + "key_password_uri\030\007 \001(\tB\003\340A\001\022$\n\027truststor" - + "e_password_uri\030\010 \001(\tB\003\340A\001\022$\n\027cross_realm" - + "_trust_realm\030\t \001(\tB\003\340A\001\022\"\n\025cross_realm_t" - + "rust_kdc\030\n \001(\tB\003\340A\001\022+\n\036cross_realm_trust" - + "_admin_server\030\013 \001(\tB\003\340A\001\0222\n%cross_realm_" - + "trust_shared_password_uri\030\014 \001(\tB\003\340A\001\022\033\n\016" - + "kdc_db_key_uri\030\r \001(\tB\003\340A\001\022\037\n\022tgt_lifetim" - + "e_hours\030\016 \001(\005B\003\340A\001\022\022\n\005realm\030\017 \001(\tB\003\340A\001\"s" - + "\n\030NodeInitializationAction\022\034\n\017executable" - + "_file\030\001 \001(\tB\003\340A\002\0229\n\021execution_timeout\030\002 " - + "\001(\0132\031.google.protobuf.DurationB\003\340A\001\"\264\003\n\r" - + "ClusterStatus\022F\n\005state\030\001 \001(\01622.google.cl" - + "oud.dataproc.v1beta2.ClusterStatus.State" - + 
"B\003\340A\003\022\023\n\006detail\030\002 \001(\tB\003\340A\003\0229\n\020state_star" - + "t_time\030\003 \001(\0132\032.google.protobuf.Timestamp" - + "B\003\340A\003\022L\n\010substate\030\004 \001(\01625.google.cloud.d" - + "ataproc.v1beta2.ClusterStatus.SubstateB\003" - + "\340A\003\"\177\n\005State\022\013\n\007UNKNOWN\020\000\022\014\n\010CREATING\020\001\022" - + "\013\n\007RUNNING\020\002\022\t\n\005ERROR\020\003\022\014\n\010DELETING\020\004\022\014\n" - + "\010UPDATING\020\005\022\014\n\010STOPPING\020\006\022\013\n\007STOPPED\020\007\022\014" - + "\n\010STARTING\020\010\"<\n\010Substate\022\017\n\013UNSPECIFIED\020" - + "\000\022\r\n\tUNHEALTHY\020\001\022\020\n\014STALE_STATUS\020\002\"\376\001\n\016S" - + "oftwareConfig\022\032\n\rimage_version\030\001 \001(\tB\003\340A" - + "\001\022V\n\nproperties\030\002 \003(\0132=.google.cloud.dat" - + "aproc.v1beta2.SoftwareConfig.PropertiesE" - + "ntryB\003\340A\001\022E\n\023optional_components\030\003 \003(\0162(" - + ".google.cloud.dataproc.v1beta2.Component" - + "\0321\n\017PropertiesEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005valu" - + "e\030\002 \001(\t:\0028\001\"\244\002\n\016ClusterMetrics\022T\n\014hdfs_m" - + "etrics\030\001 \003(\0132>.google.cloud.dataproc.v1b" - + "eta2.ClusterMetrics.HdfsMetricsEntry\022T\n\014" - + "yarn_metrics\030\002 \003(\0132>.google.cloud.datapr" - + "oc.v1beta2.ClusterMetrics.YarnMetricsEnt" - + "ry\0322\n\020HdfsMetricsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005v" - + "alue\030\002 \001(\003:\0028\001\0322\n\020YarnMetricsEntry\022\013\n\003ke" - + "y\030\001 \001(\t\022\r\n\005value\030\002 \001(\003:\0028\001\"\233\001\n\024CreateClu" - + "sterRequest\022\027\n\nproject_id\030\001 \001(\tB\003\340A\002\022\023\n\006" - + "region\030\003 \001(\tB\003\340A\002\022<\n\007cluster\030\002 \001(\0132&.goo" - + "gle.cloud.dataproc.v1beta2.ClusterB\003\340A\002\022" - + "\027\n\nrequest_id\030\004 \001(\tB\003\340A\001\"\263\002\n\024UpdateClust" + + "(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\233\010\n\rClusterConfig\022" + + "\032\n\rconfig_bucket\030\001 \001(\tB\003\340A\001\022\030\n\013temp_buck" + + "et\030\002 \001(\tB\003\340A\001\022P\n\022gce_cluster_config\030\010 \001(" + + "\0132/.google.cloud.dataproc.v1beta2.GceClu" + + "sterConfigB\003\340A\001\022N\n\rmaster_config\030\t \001(\01322" + + ".google.cloud.dataproc.v1beta2.InstanceG" + + "roupConfigB\003\340A\001\022N\n\rworker_config\030\n \001(\01322" + + ".google.cloud.dataproc.v1beta2.InstanceG" + + "roupConfigB\003\340A\001\022X\n\027secondary_worker_conf" + + "ig\030\014 \001(\01322.google.cloud.dataproc.v1beta2" + + ".InstanceGroupConfigB\003\340A\001\022K\n\017software_co" + + "nfig\030\r \001(\0132-.google.cloud.dataproc.v1bet" + + "a2.SoftwareConfigB\003\340A\001\022M\n\020lifecycle_conf" + + "ig\030\016 \001(\0132..google.cloud.dataproc.v1beta2" + + ".LifecycleConfigB\003\340A\001\022\\\n\026initialization_" + + "actions\030\013 \003(\01327.google.cloud.dataproc.v1" + + "beta2.NodeInitializationActionB\003\340A\001\022O\n\021e" + + "ncryption_config\030\017 \001(\0132/.google.cloud.da" + + "taproc.v1beta2.EncryptionConfigB\003\340A\001\022Q\n\022" + + "autoscaling_config\030\020 \001(\01320.google.cloud." + + "dataproc.v1beta2.AutoscalingConfigB\003\340A\001\022" + + "K\n\017endpoint_config\030\021 \001(\0132-.google.cloud." 
+ + "dataproc.v1beta2.EndpointConfigB\003\340A\001\022K\n\017" + + "security_config\030\022 \001(\0132-.google.cloud.dat" + + "aproc.v1beta2.SecurityConfigB\003\340A\001\022P\n\022gke" + + "_cluster_config\030\023 \001(\0132/.google.cloud.dat" + + "aproc.v1beta2.GkeClusterConfigB\003\340A\001\"\230\002\n\020" + + "GkeClusterConfig\022|\n namespaced_gke_deplo" + + "yment_target\030\001 \001(\0132M.google.cloud.datapr" + + "oc.v1beta2.GkeClusterConfig.NamespacedGk" + + "eDeploymentTargetB\003\340A\001\032\205\001\n\035NamespacedGke" + + "DeploymentTarget\022D\n\022target_gke_cluster\030\001" + + " \001(\tB(\340A\001\372A\"\n container.googleapis.com/C" + + "luster\022\036\n\021cluster_namespace\030\002 \001(\tB\003\340A\001\"\277" + + "\001\n\016EndpointConfig\022U\n\nhttp_ports\030\001 \003(\0132<." + + "google.cloud.dataproc.v1beta2.EndpointCo" + + "nfig.HttpPortsEntryB\003\340A\003\022$\n\027enable_http_" + + "port_access\030\002 \001(\010B\003\340A\001\0320\n\016HttpPortsEntry" + + "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\",\n\021Auto" + + "scalingConfig\022\027\n\npolicy_uri\030\001 \001(\tB\003\340A\001\"4" + + "\n\020EncryptionConfig\022 \n\023gce_pd_kms_key_nam" + + "e\030\001 \001(\tB\003\340A\001\"\251\003\n\020GceClusterConfig\022\025\n\010zon" + + "e_uri\030\001 \001(\tB\003\340A\001\022\030\n\013network_uri\030\002 \001(\tB\003\340" + + "A\001\022\033\n\016subnetwork_uri\030\006 \001(\tB\003\340A\001\022\035\n\020inter" + + "nal_ip_only\030\007 \001(\010B\003\340A\001\022\034\n\017service_accoun" + + "t\030\010 \001(\tB\003\340A\001\022#\n\026service_account_scopes\030\003" + + " \003(\tB\003\340A\001\022\014\n\004tags\030\004 \003(\t\022O\n\010metadata\030\005 \003(" + + "\0132=.google.cloud.dataproc.v1beta2.GceClu" + + "sterConfig.MetadataEntry\022U\n\024reservation_" + + "affinity\030\013 \001(\01322.google.cloud.dataproc.v" + + "1beta2.ReservationAffinityB\003\340A\001\032/\n\rMetad" + + "ataEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001" + + "\"\334\004\n\023InstanceGroupConfig\022\032\n\rnum_instance" + + "s\030\001 \001(\005B\003\340A\001\022\033\n\016instance_names\030\002 \003(\tB\003\340A" + + "\003\022\026\n\timage_uri\030\003 \001(\tB\003\340A\001\022\035\n\020machine_typ" + + "e_uri\030\004 \001(\tB\003\340A\001\022C\n\013disk_config\030\005 \001(\0132)." 
+ + "google.cloud.dataproc.v1beta2.DiskConfig" + + "B\003\340A\001\022\033\n\016is_preemptible\030\006 \001(\010B\003\340A\003\022^\n\016pr" + + "eemptibility\030\n \001(\0162A.google.cloud.datapr" + + "oc.v1beta2.InstanceGroupConfig.Preemptib" + + "ilityB\003\340A\001\022T\n\024managed_group_config\030\007 \001(\013" + + "21.google.cloud.dataproc.v1beta2.Managed" + + "GroupConfigB\003\340A\003\022K\n\014accelerators\030\010 \003(\01320" + + ".google.cloud.dataproc.v1beta2.Accelerat" + + "orConfigB\003\340A\001\022\030\n\020min_cpu_platform\030\t \001(\t\"" + + "V\n\016Preemptibility\022\036\n\032PREEMPTIBILITY_UNSP" + + "ECIFIED\020\000\022\023\n\017NON_PREEMPTIBLE\020\001\022\017\n\013PREEMP" + + "TIBLE\020\002\"c\n\022ManagedGroupConfig\022#\n\026instanc" + + "e_template_name\030\001 \001(\tB\003\340A\003\022(\n\033instance_g" + + "roup_manager_name\030\002 \001(\tB\003\340A\003\"L\n\021Accelera" + + "torConfig\022\034\n\024accelerator_type_uri\030\001 \001(\t\022" + + "\031\n\021accelerator_count\030\002 \001(\005\"a\n\nDiskConfig" + + "\022\033\n\016boot_disk_type\030\003 \001(\tB\003\340A\001\022\036\n\021boot_di" + + "sk_size_gb\030\001 \001(\005B\003\340A\001\022\026\n\016num_local_ssds\030" + + "\002 \001(\005\"\203\002\n\017LifecycleConfig\0227\n\017idle_delete" + + "_ttl\030\001 \001(\0132\031.google.protobuf.DurationB\003\340" + + "A\001\022;\n\020auto_delete_time\030\002 \001(\0132\032.google.pr" + + "otobuf.TimestampB\003\340A\001H\000\0229\n\017auto_delete_t" + + "tl\030\003 \001(\0132\031.google.protobuf.DurationB\003\340A\001" + + "H\000\0228\n\017idle_start_time\030\004 \001(\0132\032.google.pro" + + "tobuf.TimestampB\003\340A\003B\005\n\003ttl\"X\n\016SecurityC" + + "onfig\022F\n\017kerberos_config\030\001 \001(\0132-.google." + + "cloud.dataproc.v1beta2.KerberosConfig\"\220\004" + + "\n\016KerberosConfig\022\034\n\017enable_kerberos\030\001 \001(" + + "\010B\003\340A\001\022(\n\033root_principal_password_uri\030\002 " + + "\001(\tB\003\340A\002\022\030\n\013kms_key_uri\030\003 \001(\tB\003\340A\002\022\031\n\014ke" + + "ystore_uri\030\004 \001(\tB\003\340A\001\022\033\n\016truststore_uri\030" + + "\005 \001(\tB\003\340A\001\022\"\n\025keystore_password_uri\030\006 \001(" + + "\tB\003\340A\001\022\035\n\020key_password_uri\030\007 \001(\tB\003\340A\001\022$\n" + + "\027truststore_password_uri\030\010 \001(\tB\003\340A\001\022$\n\027c" + + "ross_realm_trust_realm\030\t \001(\tB\003\340A\001\022\"\n\025cro" + + "ss_realm_trust_kdc\030\n \001(\tB\003\340A\001\022+\n\036cross_r" + + "ealm_trust_admin_server\030\013 \001(\tB\003\340A\001\0222\n%cr" + + "oss_realm_trust_shared_password_uri\030\014 \001(" + + "\tB\003\340A\001\022\033\n\016kdc_db_key_uri\030\r \001(\tB\003\340A\001\022\037\n\022t" + + "gt_lifetime_hours\030\016 \001(\005B\003\340A\001\022\022\n\005realm\030\017 " + + "\001(\tB\003\340A\001\"s\n\030NodeInitializationAction\022\034\n\017" + + "executable_file\030\001 \001(\tB\003\340A\002\0229\n\021execution_" + + "timeout\030\002 \001(\0132\031.google.protobuf.Duration" + + "B\003\340A\001\"\264\003\n\rClusterStatus\022F\n\005state\030\001 \001(\01622" + + ".google.cloud.dataproc.v1beta2.ClusterSt" + + "atus.StateB\003\340A\003\022\023\n\006detail\030\002 \001(\tB\003\340A\003\0229\n\020" + + "state_start_time\030\003 \001(\0132\032.google.protobuf" + + ".TimestampB\003\340A\003\022L\n\010substate\030\004 \001(\01625.goog" + + "le.cloud.dataproc.v1beta2.ClusterStatus." 
+ + "SubstateB\003\340A\003\"\177\n\005State\022\013\n\007UNKNOWN\020\000\022\014\n\010C" + + "REATING\020\001\022\013\n\007RUNNING\020\002\022\t\n\005ERROR\020\003\022\014\n\010DEL" + + "ETING\020\004\022\014\n\010UPDATING\020\005\022\014\n\010STOPPING\020\006\022\013\n\007S" + + "TOPPED\020\007\022\014\n\010STARTING\020\010\"<\n\010Substate\022\017\n\013UN" + + "SPECIFIED\020\000\022\r\n\tUNHEALTHY\020\001\022\020\n\014STALE_STAT" + + "US\020\002\"\376\001\n\016SoftwareConfig\022\032\n\rimage_version" + + "\030\001 \001(\tB\003\340A\001\022V\n\nproperties\030\002 \003(\0132=.google" + + ".cloud.dataproc.v1beta2.SoftwareConfig.P" + + "ropertiesEntryB\003\340A\001\022E\n\023optional_componen" + + "ts\030\003 \003(\0162(.google.cloud.dataproc.v1beta2" + + ".Component\0321\n\017PropertiesEntry\022\013\n\003key\030\001 \001" + + "(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\244\002\n\016ClusterMetrics" + + "\022T\n\014hdfs_metrics\030\001 \003(\0132>.google.cloud.da" + + "taproc.v1beta2.ClusterMetrics.HdfsMetric" + + "sEntry\022T\n\014yarn_metrics\030\002 \003(\0132>.google.cl" + + "oud.dataproc.v1beta2.ClusterMetrics.Yarn" + + "MetricsEntry\0322\n\020HdfsMetricsEntry\022\013\n\003key\030" + + "\001 \001(\t\022\r\n\005value\030\002 \001(\003:\0028\001\0322\n\020YarnMetricsE" + + "ntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\003:\0028\001\"\233\001\n" + + "\024CreateClusterRequest\022\027\n\nproject_id\030\001 \001(" + + "\tB\003\340A\002\022\023\n\006region\030\003 \001(\tB\003\340A\002\022<\n\007cluster\030\002" + + " \001(\0132&.google.cloud.dataproc.v1beta2.Clu" + + "sterB\003\340A\002\022\027\n\nrequest_id\030\004 \001(\tB\003\340A\001\"\263\002\n\024U" + + "pdateClusterRequest\022\027\n\nproject_id\030\001 \001(\tB" + + "\003\340A\002\022\023\n\006region\030\005 \001(\tB\003\340A\002\022\031\n\014cluster_nam" + + "e\030\002 \001(\tB\003\340A\002\022<\n\007cluster\030\003 \001(\0132&.google.c" + + "loud.dataproc.v1beta2.ClusterB\003\340A\002\022E\n\035gr" + + "aceful_decommission_timeout\030\006 \001(\0132\031.goog" + + "le.protobuf.DurationB\003\340A\001\0224\n\013update_mask" + + "\030\004 \001(\0132\032.google.protobuf.FieldMaskB\003\340A\002\022" + + "\027\n\nrequest_id\030\007 \001(\tB\003\340A\001\"\223\001\n\024DeleteClust" + "erRequest\022\027\n\nproject_id\030\001 \001(\tB\003\340A\002\022\023\n\006re" - + "gion\030\005 \001(\tB\003\340A\002\022\031\n\014cluster_name\030\002 \001(\tB\003\340" - + "A\002\022<\n\007cluster\030\003 \001(\0132&.google.cloud.datap" - + "roc.v1beta2.ClusterB\003\340A\002\022E\n\035graceful_dec" - + "ommission_timeout\030\006 \001(\0132\031.google.protobu" - + "f.DurationB\003\340A\001\0224\n\013update_mask\030\004 \001(\0132\032.g" - + "oogle.protobuf.FieldMaskB\003\340A\002\022\027\n\nrequest" - + "_id\030\007 \001(\tB\003\340A\001\"\223\001\n\024DeleteClusterRequest\022" - + "\027\n\nproject_id\030\001 \001(\tB\003\340A\002\022\023\n\006region\030\003 \001(\t" - + "B\003\340A\002\022\031\n\014cluster_name\030\002 \001(\tB\003\340A\002\022\031\n\014clus" - + "ter_uuid\030\004 \001(\tB\003\340A\001\022\027\n\nrequest_id\030\005 \001(\tB" - + "\003\340A\001\"\\\n\021GetClusterRequest\022\027\n\nproject_id\030" - + "\001 \001(\tB\003\340A\002\022\023\n\006region\030\003 \001(\tB\003\340A\002\022\031\n\014clust" - + "er_name\030\002 \001(\tB\003\340A\002\"\211\001\n\023ListClustersReque" - + "st\022\027\n\nproject_id\030\001 \001(\tB\003\340A\002\022\023\n\006region\030\004 " - + 
"\001(\tB\003\340A\002\022\023\n\006filter\030\005 \001(\tB\003\340A\001\022\026\n\tpage_si" - + "ze\030\002 \001(\005B\003\340A\001\022\027\n\npage_token\030\003 \001(\tB\003\340A\001\"s" - + "\n\024ListClustersResponse\022=\n\010clusters\030\001 \003(\013" - + "2&.google.cloud.dataproc.v1beta2.Cluster" - + "B\003\340A\003\022\034\n\017next_page_token\030\002 \001(\tB\003\340A\003\"a\n\026D" - + "iagnoseClusterRequest\022\027\n\nproject_id\030\001 \001(" - + "\tB\003\340A\002\022\023\n\006region\030\003 \001(\tB\003\340A\002\022\031\n\014cluster_n" - + "ame\030\002 \001(\tB\003\340A\002\"1\n\026DiagnoseClusterResults" - + "\022\027\n\noutput_uri\030\001 \001(\tB\003\340A\003\"\375\001\n\023Reservatio" - + "nAffinity\022^\n\030consume_reservation_type\030\001 " - + "\001(\01627.google.cloud.dataproc.v1beta2.Rese" - + "rvationAffinity.TypeB\003\340A\001\022\020\n\003key\030\002 \001(\tB\003" - + "\340A\001\022\023\n\006values\030\003 \003(\tB\003\340A\001\"_\n\004Type\022\024\n\020TYPE" - + "_UNSPECIFIED\020\000\022\022\n\016NO_RESERVATION\020\001\022\023\n\017AN" - + "Y_RESERVATION\020\002\022\030\n\024SPECIFIC_RESERVATION\020" - + "\0032\347\r\n\021ClusterController\022\221\002\n\rCreateCluste" - + "r\0223.google.cloud.dataproc.v1beta2.Create" - + "ClusterRequest\032\035.google.longrunning.Oper" - + "ation\"\253\001\202\323\344\223\002C\"8/v1beta2/projects/{proje" - + "ct_id}/regions/{region}/clusters:\007cluste" - + "r\332A\033project_id, region, cluster\312AA\n\007Clus" - + "ter\0226google.cloud.dataproc.v1beta2.Clust" - + "erOperationMetadata\022\273\002\n\rUpdateCluster\0223." - + "google.cloud.dataproc.v1beta2.UpdateClus" - + "terRequest\032\035.google.longrunning.Operatio" - + "n\"\325\001\202\323\344\223\002R2G/v1beta2/projects/{project_i" - + "d}/regions/{region}/clusters/{cluster_na" - + "me}:\007cluster\332A6project_id, region, clust" - + "er_name, cluster, update_mask\312AA\n\007Cluste" - + "r\0226google.cloud.dataproc.v1beta2.Cluster" - + "OperationMetadata\022\252\002\n\rDeleteCluster\0223.go" - + "ogle.cloud.dataproc.v1beta2.DeleteCluste" - + "rRequest\032\035.google.longrunning.Operation\"" - + "\304\001\202\323\344\223\002I*G/v1beta2/projects/{project_id}" - + "/regions/{region}/clusters/{cluster_name" - + "}\332A project_id, region, cluster_name\312AO\n" - + "\025google.protobuf.Empty\0226google.cloud.dat" - + "aproc.v1beta2.ClusterOperationMetadata\022\332" - + "\001\n\nGetCluster\0220.google.cloud.dataproc.v1" - + "beta2.GetClusterRequest\032&.google.cloud.d" - + "ataproc.v1beta2.Cluster\"r\202\323\344\223\002I\022G/v1beta" - + "2/projects/{project_id}/regions/{region}" - + "/clusters/{cluster_name}\332A project_id, r" - + "egion, cluster_name\022\353\001\n\014ListClusters\0222.g" - + "oogle.cloud.dataproc.v1beta2.ListCluster" - + "sRequest\0323.google.cloud.dataproc.v1beta2" - + ".ListClustersResponse\"r\202\323\344\223\002:\0228/v1beta2/" - + "projects/{project_id}/regions/{region}/c" - + "lusters\332A\022project_id, region\332A\032project_i" - + "d, region, filter\022\272\002\n\017DiagnoseCluster\0225." 
- + "google.cloud.dataproc.v1beta2.DiagnoseCl" - + "usterRequest\032\035.google.longrunning.Operat" - + "ion\"\320\001\202\323\344\223\002U\"P/v1beta2/projects/{project" - + "_id}/regions/{region}/clusters/{cluster_" - + "name}:diagnose:\001*\332A project_id, region, " - + "cluster_name\312AO\n\025google.protobuf.Empty\0226" - + "google.cloud.dataproc.v1beta2.ClusterOpe" - + "rationMetadata\032K\312A\027dataproc.googleapis.c" - + "om\322A.https://www.googleapis.com/auth/clo" - + "ud-platformB{\n!com.google.cloud.dataproc" - + ".v1beta2B\rClustersProtoP\001ZEgoogle.golang" - + ".org/genproto/googleapis/cloud/dataproc/" - + "v1beta2;dataprocb\006proto3" + + "gion\030\003 \001(\tB\003\340A\002\022\031\n\014cluster_name\030\002 \001(\tB\003\340" + + "A\002\022\031\n\014cluster_uuid\030\004 \001(\tB\003\340A\001\022\027\n\nrequest" + + "_id\030\005 \001(\tB\003\340A\001\"\\\n\021GetClusterRequest\022\027\n\np" + + "roject_id\030\001 \001(\tB\003\340A\002\022\023\n\006region\030\003 \001(\tB\003\340A" + + "\002\022\031\n\014cluster_name\030\002 \001(\tB\003\340A\002\"\211\001\n\023ListClu" + + "stersRequest\022\027\n\nproject_id\030\001 \001(\tB\003\340A\002\022\023\n" + + "\006region\030\004 \001(\tB\003\340A\002\022\023\n\006filter\030\005 \001(\tB\003\340A\001\022" + + "\026\n\tpage_size\030\002 \001(\005B\003\340A\001\022\027\n\npage_token\030\003 " + + "\001(\tB\003\340A\001\"s\n\024ListClustersResponse\022=\n\010clus" + + "ters\030\001 \003(\0132&.google.cloud.dataproc.v1bet" + + "a2.ClusterB\003\340A\003\022\034\n\017next_page_token\030\002 \001(\t" + + "B\003\340A\003\"a\n\026DiagnoseClusterRequest\022\027\n\nproje" + + "ct_id\030\001 \001(\tB\003\340A\002\022\023\n\006region\030\003 \001(\tB\003\340A\002\022\031\n" + + "\014cluster_name\030\002 \001(\tB\003\340A\002\"1\n\026DiagnoseClus" + + "terResults\022\027\n\noutput_uri\030\001 \001(\tB\003\340A\003\"\375\001\n\023" + + "ReservationAffinity\022^\n\030consume_reservati" + + "on_type\030\001 \001(\01627.google.cloud.dataproc.v1" + + "beta2.ReservationAffinity.TypeB\003\340A\001\022\020\n\003k" + + "ey\030\002 \001(\tB\003\340A\001\022\023\n\006values\030\003 \003(\tB\003\340A\001\"_\n\004Ty" + + "pe\022\024\n\020TYPE_UNSPECIFIED\020\000\022\022\n\016NO_RESERVATI" + + "ON\020\001\022\023\n\017ANY_RESERVATION\020\002\022\030\n\024SPECIFIC_RE" + + "SERVATION\020\0032\347\r\n\021ClusterController\022\221\002\n\rCr" + + "eateCluster\0223.google.cloud.dataproc.v1be" + + "ta2.CreateClusterRequest\032\035.google.longru" + + "nning.Operation\"\253\001\202\323\344\223\002C\"8/v1beta2/proje" + + "cts/{project_id}/regions/{region}/cluste" + + "rs:\007cluster\332A\033project_id, region, cluste" + + "r\312AA\n\007Cluster\0226google.cloud.dataproc.v1b" + + "eta2.ClusterOperationMetadata\022\273\002\n\rUpdate" + + "Cluster\0223.google.cloud.dataproc.v1beta2." + + "UpdateClusterRequest\032\035.google.longrunnin" + + "g.Operation\"\325\001\202\323\344\223\002R2G/v1beta2/projects/" + + "{project_id}/regions/{region}/clusters/{" + + "cluster_name}:\007cluster\332A6project_id, reg" + + "ion, cluster_name, cluster, update_mask\312" + + "AA\n\007Cluster\0226google.cloud.dataproc.v1bet" + + "a2.ClusterOperationMetadata\022\252\002\n\rDeleteCl" + + "uster\0223.google.cloud.dataproc.v1beta2.De" + + "leteClusterRequest\032\035.google.longrunning." 
+ + "Operation\"\304\001\202\323\344\223\002I*G/v1beta2/projects/{p" + + "roject_id}/regions/{region}/clusters/{cl" + + "uster_name}\332A project_id, region, cluste" + + "r_name\312AO\n\025google.protobuf.Empty\0226google" + + ".cloud.dataproc.v1beta2.ClusterOperation" + + "Metadata\022\332\001\n\nGetCluster\0220.google.cloud.d" + + "ataproc.v1beta2.GetClusterRequest\032&.goog" + + "le.cloud.dataproc.v1beta2.Cluster\"r\202\323\344\223\002" + + "I\022G/v1beta2/projects/{project_id}/region" + + "s/{region}/clusters/{cluster_name}\332A pro" + + "ject_id, region, cluster_name\022\353\001\n\014ListCl" + + "usters\0222.google.cloud.dataproc.v1beta2.L" + + "istClustersRequest\0323.google.cloud.datapr" + + "oc.v1beta2.ListClustersResponse\"r\202\323\344\223\002:\022" + + "8/v1beta2/projects/{project_id}/regions/" + + "{region}/clusters\332A\022project_id, region\332A" + + "\032project_id, region, filter\022\272\002\n\017Diagnose" + + "Cluster\0225.google.cloud.dataproc.v1beta2." + + "DiagnoseClusterRequest\032\035.google.longrunn" + + "ing.Operation\"\320\001\202\323\344\223\002U\"P/v1beta2/project" + + "s/{project_id}/regions/{region}/clusters" + + "/{cluster_name}:diagnose:\001*\332A project_id" + + ", region, cluster_name\312AO\n\025google.protob" + + "uf.Empty\0226google.cloud.dataproc.v1beta2." + + "ClusterOperationMetadata\032K\312A\027dataproc.go" + + "ogleapis.com\322A.https://www.googleapis.co" + + "m/auth/cloud-platformB\275\002\n!com.google.clo" + + "ud.dataproc.v1beta2B\rClustersProtoP\001ZEgo" + + "ogle.golang.org/genproto/googleapis/clou" + + "d/dataproc/v1beta2;dataproc\352A^\n containe" + + "r.googleapis.com/Cluster\022:projects/{proj" + + "ect}/locations/{location}/clusters/{clus" + + "ter}\352A^\n metastore.googleapis.com/Servic" + + "e\022:projects/{project}/locations/{locatio" + + "n}/services/{service}b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -439,6 +449,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { internal_static_google_cloud_dataproc_v1beta2_ClusterConfig_descriptor, new java.lang.String[] { "ConfigBucket", + "TempBucket", "GceClusterConfig", "MasterConfig", "WorkerConfig", @@ -542,6 +553,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "MachineTypeUri", "DiskConfig", "IsPreemptible", + "Preemptibility", "ManagedGroupConfig", "Accelerators", "MinCpuPlatform", @@ -755,6 +767,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { registry.add(com.google.api.AnnotationsProto.http); registry.add(com.google.api.ClientProto.methodSignature); registry.add(com.google.api.ClientProto.oauthScopes); + registry.add(com.google.api.ResourceProto.resourceDefinition); registry.add(com.google.api.ResourceProto.resourceReference); registry.add(com.google.longrunning.OperationsProto.operationInfo); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Component.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Component.java index 2e615875..25c00c02 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Component.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/Component.java @@ -32,7 +32,7 @@ public enum Component implements com.google.protobuf.ProtocolMessageEnum { * * *
-   * Unspecified component.
+   * Unspecified component. Specifying this will cause cluster creation to fail.
    * 
* * COMPONENT_UNSPECIFIED = 0; @@ -48,16 +48,6 @@ public enum Component implements com.google.protobuf.ProtocolMessageEnum { * ANACONDA = 5; */ ANACONDA(5), - /** - * - * - *
-   * Docker
-   * 
- * - * DOCKER = 13; - */ - DOCKER(13), /** * * @@ -72,12 +62,12 @@ public enum Component implements com.google.protobuf.ProtocolMessageEnum { * * *
-   * Flink
+   * HBase.
    * 
* - * FLINK = 14; + * HBASE = 11; */ - FLINK(14), + HBASE(11), /** * * @@ -165,7 +155,7 @@ public enum Component implements com.google.protobuf.ProtocolMessageEnum { * * *
-   * Unspecified component.
+   * Unspecified component. Specifying this will cause cluster creation to fail.
    * 
* * COMPONENT_UNSPECIFIED = 0; @@ -181,16 +171,6 @@ public enum Component implements com.google.protobuf.ProtocolMessageEnum { * ANACONDA = 5; */ public static final int ANACONDA_VALUE = 5; - /** - * - * - *
-   * Docker
-   * 
- * - * DOCKER = 13; - */ - public static final int DOCKER_VALUE = 13; /** * * @@ -205,12 +185,12 @@ public enum Component implements com.google.protobuf.ProtocolMessageEnum { * * *
-   * Flink
+   * HBase.
    * 
* - * FLINK = 14; + * HBASE = 11; */ - public static final int FLINK_VALUE = 14; + public static final int HBASE_VALUE = 11; /** * * @@ -320,12 +300,10 @@ public static Component forNumber(int value) { return COMPONENT_UNSPECIFIED; case 5: return ANACONDA; - case 13: - return DOCKER; case 9: return DRUID; - case 14: - return FLINK; + case 11: + return HBASE; case 3: return HIVE_WEBHCAT; case 1: diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/CreateClusterRequest.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/CreateClusterRequest.java index 0cd35962..d02fb356 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/CreateClusterRequest.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/CreateClusterRequest.java @@ -304,10 +304,11 @@ public com.google.cloud.dataproc.v1beta2.ClusterOrBuilder getClusterOrBuilder() * *
    * Optional. A unique id used to identify the request. If the server
-   * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-   * is returned.
+   * receives two
+   * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
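The request_id idempotency described above is exercised by supplying a client-generated UUID and reusing it on retries. A minimal sketch against the generated v1beta2 builders (project, region, and cluster names are hypothetical, not part of this patch):

    import com.google.cloud.dataproc.v1beta2.Cluster;
    import com.google.cloud.dataproc.v1beta2.CreateClusterRequest;
    import java.util.UUID;

    // Reusing the same id on retry lets the server ignore the duplicate and
    // return the Operation created for the first attempt.
    CreateClusterRequest request =
        CreateClusterRequest.newBuilder()
            .setProjectId("my-project")   // hypothetical
            .setRegion("us-central1")     // hypothetical
            .setCluster(Cluster.newBuilder().setClusterName("my-cluster")) // hypothetical
            .setRequestId(UUID.randomUUID().toString())
            .build();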
@@ -335,10 +336,11 @@ public java.lang.String getRequestId() {
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-   * is returned.
+   * receives two
+   * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1167,10 +1169,11 @@ public com.google.cloud.dataproc.v1beta2.ClusterOrBuilder getClusterOrBuilder()
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-     * is returned.
+     * receives two
+     * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1197,10 +1200,11 @@ public java.lang.String getRequestId() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-     * is returned.
+     * receives two
+     * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1227,10 +1231,11 @@ public com.google.protobuf.ByteString getRequestIdBytes() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-     * is returned.
+     * receives two
+     * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1256,10 +1261,11 @@ public Builder setRequestId(java.lang.String value) {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-     * is returned.
+     * receives two
+     * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1281,10 +1287,11 @@ public Builder clearRequestId() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-     * is returned.
+     * receives two
+     * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
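The new ClusterName resource-name class added earlier in this patch follows the standard GAPIC pattern of of()/format()/parse(); a short usage sketch (all values hypothetical):

    import com.google.cloud.dataproc.v1beta2.ClusterName;

    // Build a name, render it, and round-trip it through parse().
    ClusterName name = ClusterName.of("my-project", "us-central1", "my-cluster");
    String formatted = name.toString();
    // -> "projects/my-project/locations/us-central1/clusters/my-cluster"
    ClusterName parsed = ClusterName.parse(formatted);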
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/CreateClusterRequestOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/CreateClusterRequestOrBuilder.java
index 2b3da599..a800b629 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/CreateClusterRequestOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/CreateClusterRequestOrBuilder.java
@@ -121,10 +121,11 @@ public interface CreateClusterRequestOrBuilder
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-   * is returned.
+   * receives two
+   * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -141,10 +142,11 @@ public interface CreateClusterRequestOrBuilder
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
-   * is returned.
+   * receives two
+   * [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
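The new ClusterConfig.temp_bucket and InstanceGroupConfig.preemptibility fields described earlier in this patch are ordinary builder fields; a minimal sketch of setting them (bucket name and worker count are hypothetical):

    import com.google.cloud.dataproc.v1beta2.ClusterConfig;
    import com.google.cloud.dataproc.v1beta2.InstanceGroupConfig;
    import com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility;

    ClusterConfig config =
        ClusterConfig.newBuilder()
            // Omit temp_bucket to let Dataproc create and manage a default
            // temp bucket (90-day TTL) for the cluster.
            .setTempBucket("my-temp-bucket") // hypothetical
            .setWorkerConfig(
                InstanceGroupConfig.newBuilder()
                    .setNumInstances(2)
                    .setPreemptibility(Preemptibility.NON_PREEMPTIBLE))
            .build();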
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/DeleteClusterRequest.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/DeleteClusterRequest.java
index 947de2cc..6e510efc 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/DeleteClusterRequest.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/DeleteClusterRequest.java
@@ -350,10 +350,11 @@ public com.google.protobuf.ByteString getClusterUuidBytes() {
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-   * backend is returned.
+   * receives two
+   * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -381,10 +382,11 @@ public java.lang.String getRequestId() {
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-   * backend is returned.
+   * receives two
+   * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1231,10 +1233,11 @@ public Builder setClusterUuidBytes(com.google.protobuf.ByteString value) {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1261,10 +1264,11 @@ public java.lang.String getRequestId() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1291,10 +1295,11 @@ public com.google.protobuf.ByteString getRequestIdBytes() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1320,10 +1325,11 @@ public Builder setRequestId(java.lang.String value) {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -1345,10 +1351,11 @@ public Builder clearRequestId() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
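With DOCKER and FLINK gone from the v1beta2 Component enum, the new HBASE value is selected through SoftwareConfig.optional_components as before; a minimal sketch (the image version is hypothetical):

    import com.google.cloud.dataproc.v1beta2.Component;
    import com.google.cloud.dataproc.v1beta2.SoftwareConfig;

    // COMPONENT_UNSPECIFIED would fail cluster creation, so pick a concrete value.
    SoftwareConfig software =
        SoftwareConfig.newBuilder()
            .setImageVersion("1.5-debian10") // hypothetical
            .addOptionalComponents(Component.HBASE)
            .build();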
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/DeleteClusterRequestOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/DeleteClusterRequestOrBuilder.java
index 33b16e97..2653cb7b 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/DeleteClusterRequestOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/DeleteClusterRequestOrBuilder.java
@@ -132,10 +132,11 @@ public interface DeleteClusterRequestOrBuilder
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-   * backend is returned.
+   * receives two
+   * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+   * requests  with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -152,10 +153,11 @@ public interface DeleteClusterRequestOrBuilder
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-   * backend is returned.
+   * receives two
+   * [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest]
+   * requests  with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
   * The id must contain only letters (a-z, A-Z), numbers (0-9),
   * underscores (_), and hyphens (-). The maximum length is 40 characters.
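Because the backend keys deduplication on the id, a caller may resend the identical request after a transient failure. A deliberately naive sketch, assuming an existing ClusterControllerClient named `client` and the `request` built in the previous sketch:

    // Illustrative only: both attempts resolve to the same long-running
    // operation because they carry the same request_id.
    try {
      client.deleteClusterAsync(request).get();
    } catch (java.util.concurrent.ExecutionException e) {
      client.deleteClusterAsync(request).get(); // safe: deduplicated server-side
    }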
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/GceClusterConfig.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/GceClusterConfig.java
index 27e8b2ea..510036eb 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/GceClusterConfig.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/GceClusterConfig.java
@@ -434,7 +434,7 @@ public boolean getInternalIpOnly() {
    *
    * 
    * Optional. The [Dataproc service
-   * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+   * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
    * (also see [VM Data Plane
    * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
    * used by Dataproc cluster VM instances to access Google Cloud Platform
@@ -466,7 +466,7 @@ public java.lang.String getServiceAccount() {
    *
    * 
    * Optional. The [Dataproc service
-   * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+   * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
    * (also see [VM Data Plane
    * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
    * used by Dataproc cluster VM instances to access Google Cloud Platform
@@ -1849,7 +1849,7 @@ public Builder clearInternalIpOnly() {
      *
      * 
      * Optional. The [Dataproc service
-     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
      * (also see [VM Data Plane
      * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
      * used by Dataproc cluster VM instances to access Google Cloud Platform
@@ -1880,7 +1880,7 @@ public java.lang.String getServiceAccount() {
      *
      * 
      * Optional. The [Dataproc service
-     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
      * (also see [VM Data Plane
      * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
      * used by Dataproc cluster VM instances to access Google Cloud Platform
@@ -1911,7 +1911,7 @@ public com.google.protobuf.ByteString getServiceAccountBytes() {
      *
      * 
      * Optional. The [Dataproc service
-     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
      * (also see [VM Data Plane
      * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
      * used by Dataproc cluster VM instances to access Google Cloud Platform
@@ -1941,7 +1941,7 @@ public Builder setServiceAccount(java.lang.String value) {
      *
      * 
      * Optional. The [Dataproc service
-     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
      * (also see [VM Data Plane
      * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
      * used by Dataproc cluster VM instances to access Google Cloud Platform
@@ -1967,7 +1967,7 @@ public Builder clearServiceAccount() {
      *
      * 
      * Optional. The [Dataproc service
-     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+     * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
      * (also see [VM Data Plane
      * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
      * used by Dataproc cluster VM instances to access Google Cloud Platform
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/GceClusterConfigOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/GceClusterConfigOrBuilder.java
index 1c431378..8867f189 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/GceClusterConfigOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/GceClusterConfigOrBuilder.java
@@ -165,7 +165,7 @@ public interface GceClusterConfigOrBuilder
    *
    * 
    * Optional. The [Dataproc service
-   * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+   * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
    * (also see [VM Data Plane
    * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
    * used by Dataproc cluster VM instances to access Google Cloud Platform
@@ -186,7 +186,7 @@ public interface GceClusterConfigOrBuilder
    *
    * 
    * Optional. The [Dataproc service
-   * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+   * account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
    * (also see [VM Data Plane
    * identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
   * used by Dataproc cluster VM instances to access Google Cloud Platform
   * services.
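The only change in this file is the corrected anchor in the service-accounts link; the field itself behaves as before. For orientation, a hedged sketch of setting it (the account address is a made-up value):

    // Illustrative only: cluster VMs run as a user-managed service account.
    GceClusterConfig gceConfig =
        GceClusterConfig.newBuilder()
            .setServiceAccount("vm-runtime@my-project.iam.gserviceaccount.com") // placeholder
            .build();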
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfig.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfig.java
index c28ec8fe..827fe6e9 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfig.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfig.java
@@ -42,6 +42,7 @@ private InstanceGroupConfig() {
     instanceNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
     imageUri_ = "";
     machineTypeUri_ = "";
+    preemptibility_ = 0;
     accelerators_ = java.util.Collections.emptyList();
     minCpuPlatform_ = "";
   }
@@ -163,6 +164,13 @@ private InstanceGroupConfig(
               minCpuPlatform_ = s;
               break;
             }
+          case 80:
+            {
+              int rawValue = input.readEnum();
+
+              preemptibility_ = rawValue;
+              break;
+            }
           default:
             {
               if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                 done = true;
               }
               break;
             }
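Field number 10 with varint wire type gives tag (10 << 3) | 0 = 80, which is why the new parser branch above matches case 80. A usage sketch for the field it backs, with invented values (per the docs below, only secondary worker groups may be marked preemptible):

    // Illustrative only, not part of this patch: secondary workers that
    // Compute Engine may reclaim, selected via the new enum.
    InstanceGroupConfig secondaryWorkers =
        InstanceGroupConfig.newBuilder()
            .setNumInstances(2) // assumed value
            .setPreemptibility(InstanceGroupConfig.Preemptibility.PREEMPTIBLE)
            .build();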
@@ -203,6 +211,176 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
             com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Builder.class);
   }
 
+  /**
+   *
+   *
+   * 
+   * Controls the use of
+   * [preemptible instances]
+   * (https://cloud.google.com/compute/docs/instances/preemptible)
+   * within the group.
+   * 
+   *
+   * Protobuf enum {@code google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility}
+   */
+  public enum Preemptibility implements com.google.protobuf.ProtocolMessageEnum {
+    /**
+     *
+     *
+     *
+     * Preemptibility is unspecified, the system will choose the
+     * appropriate setting for each instance group.
+     * 
+     *
+     * PREEMPTIBILITY_UNSPECIFIED = 0;
+     */
+    PREEMPTIBILITY_UNSPECIFIED(0),
+    /**
+     *
+     *
+     *
+     * Instances are non-preemptible.
+     * This option is allowed for all instance groups and is the only valid
+     * value for Master and Worker instance groups.
+     * 
+     *
+     * NON_PREEMPTIBLE = 1;
+     */
+    NON_PREEMPTIBLE(1),
+    /**
+     *
+     *
+     *
+     * Instances are preemptible.
+     * This option is allowed only for secondary worker groups.
+     * 
+     *
+     * PREEMPTIBLE = 2;
+     */
+    PREEMPTIBLE(2),
+    UNRECOGNIZED(-1),
+    ;
+
+    /**
+     *
+     *
+     *
+     * Preemptibility is unspecified, the system will choose the
+     * appropriate setting for each instance group.
+     * 
+     *
+     * PREEMPTIBILITY_UNSPECIFIED = 0;
+     */
+    public static final int PREEMPTIBILITY_UNSPECIFIED_VALUE = 0;
+    /**
+     *
+     *
+     *
+     * Instances are non-preemptible.
+     * This option is allowed for all instance groups and is the only valid
+     * value for Master and Worker instance groups.
+     * 
+     *
+     * NON_PREEMPTIBLE = 1;
+     */
+    public static final int NON_PREEMPTIBLE_VALUE = 1;
+    /**
+     *
+     *
+     *
+     * Instances are preemptible.
+     * This option is allowed only for secondary worker groups.
+     * 
+     *
+     * PREEMPTIBLE = 2;
+     */
+    public static final int PREEMPTIBLE_VALUE = 2;
+
+    public final int getNumber() {
+      if (this == UNRECOGNIZED) {
+        throw new java.lang.IllegalArgumentException(
+            "Can't get the number of an unknown enum value.");
+      }
+      return value;
+    }
+
+    /**
+     * @param value The numeric wire value of the corresponding enum entry.
+     * @return The enum associated with the given numeric wire value.
+     * @deprecated Use {@link #forNumber(int)} instead.
+     */
+    @java.lang.Deprecated
+    public static Preemptibility valueOf(int value) {
+      return forNumber(value);
+    }
+
+    /**
+     * @param value The numeric wire value of the corresponding enum entry.
+     * @return The enum associated with the given numeric wire value.
+     */
+    public static Preemptibility forNumber(int value) {
+      switch (value) {
+        case 0:
+          return PREEMPTIBILITY_UNSPECIFIED;
+        case 1:
+          return NON_PREEMPTIBLE;
+        case 2:
+          return PREEMPTIBLE;
+        default:
+          return null;
+      }
+    }
+
+    public static com.google.protobuf.Internal.EnumLiteMap<Preemptibility> internalGetValueMap() {
+      return internalValueMap;
+    }
+
+    private static final com.google.protobuf.Internal.EnumLiteMap<Preemptibility> internalValueMap =
+        new com.google.protobuf.Internal.EnumLiteMap<Preemptibility>() {
+          public Preemptibility findValueByNumber(int number) {
+            return Preemptibility.forNumber(number);
+          }
+        };
+
+    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
+      if (this == UNRECOGNIZED) {
+        throw new java.lang.IllegalStateException(
+            "Can't get the descriptor of an unrecognized enum value.");
+      }
+      return getDescriptor().getValues().get(ordinal());
+    }
+
+    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
+      return com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.getDescriptor()
+          .getEnumTypes()
+          .get(0);
+    }
+
+    private static final Preemptibility[] VALUES = values();
+
+    public static Preemptibility valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+      if (desc.getType() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
+      }
+      if (desc.getIndex() == -1) {
+        return UNRECOGNIZED;
+      }
+      return VALUES[desc.getIndex()];
+    }
+
+    private final int value;
+
+    private Preemptibility(int value) {
+      this.value = value;
+    }
+
+    // @@protoc_insertion_point(enum_scope:google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility)
+  }
+
   public static final int NUM_INSTANCES_FIELD_NUMBER = 1;
   private int numInstances_;
   /**
@@ -498,6 +676,57 @@ public boolean getIsPreemptible() {
     return isPreemptible_;
   }
+
+  public static final int PREEMPTIBILITY_FIELD_NUMBER = 10;
+  private int preemptibility_;
+  /**
+   *
+   *
+   *
+   * Optional. Specifies the preemptibility of the instance group.
+   * The default value for master and worker groups is
+   * `NON_PREEMPTIBLE`. This default cannot be changed.
+   * The default value for secondary instances is
+   * `PREEMPTIBLE`.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   *
+   * @return The enum numeric value on the wire for preemptibility.
+   */
+  @java.lang.Override
+  public int getPreemptibilityValue() {
+    return preemptibility_;
+  }
+  /**
+   *
+   *
+   *
+   * Optional. Specifies the preemptibility of the instance group.
+   * The default value for master and worker groups is
+   * `NON_PREEMPTIBLE`. This default cannot be changed.
+   * The default value for secondary instances is
+   * `PREEMPTIBLE`.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   *
+   * @return The preemptibility.
+   */
+  @java.lang.Override
+  public com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility getPreemptibility() {
+    @SuppressWarnings("deprecation")
+    com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility result =
+        com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility.valueOf(
+            preemptibility_);
+    return result == null
+        ? com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility.UNRECOGNIZED
+        : result;
+  }
+
   public static final int MANAGED_GROUP_CONFIG_FIELD_NUMBER = 7;
   private com.google.cloud.dataproc.v1beta2.ManagedGroupConfig managedGroupConfig_;
   /**
@@ -738,6 +967,12 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
     if (!getMinCpuPlatformBytes().isEmpty()) {
       com.google.protobuf.GeneratedMessageV3.writeString(output, 9, minCpuPlatform_);
     }
+    if (preemptibility_
+        != com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility
+            .PREEMPTIBILITY_UNSPECIFIED
+            .getNumber()) {
+      output.writeEnum(10, preemptibility_);
+    }
     unknownFields.writeTo(output);
   }
@@ -779,6 +1014,12 @@ public int getSerializedSize() {
     if (!getMinCpuPlatformBytes().isEmpty()) {
       size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, minCpuPlatform_);
     }
+    if (preemptibility_
+        != com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility
+            .PREEMPTIBILITY_UNSPECIFIED
+            .getNumber()) {
+      size += com.google.protobuf.CodedOutputStream.computeEnumSize(10, preemptibility_);
+    }
     size += unknownFields.getSerializedSize();
     memoizedSize = size;
     return size;
@@ -804,6 +1045,7 @@ public boolean equals(final java.lang.Object obj) {
       if (!getDiskConfig().equals(other.getDiskConfig())) return false;
     }
     if (getIsPreemptible() != other.getIsPreemptible()) return false;
+    if (preemptibility_ != other.preemptibility_) return false;
     if (hasManagedGroupConfig() != other.hasManagedGroupConfig()) return false;
     if (hasManagedGroupConfig()) {
       if (!getManagedGroupConfig().equals(other.getManagedGroupConfig())) return false;
@@ -837,6 +1079,8 @@ public int hashCode() {
     }
     hash = (37 * hash) + IS_PREEMPTIBLE_FIELD_NUMBER;
     hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getIsPreemptible());
+    hash = (37 * hash) + PREEMPTIBILITY_FIELD_NUMBER;
+    hash = (53 * hash) + preemptibility_;
     if (hasManagedGroupConfig()) {
       hash = (37 * hash) + MANAGED_GROUP_CONFIG_FIELD_NUMBER;
       hash = (53 * hash) + getManagedGroupConfig().hashCode();
@@ -1012,6 +1256,8 @@ public Builder clear() {
       }
       isPreemptible_ = false;
+      preemptibility_ = 0;
+
       if (managedGroupConfigBuilder_ == null) {
         managedGroupConfig_ = null;
       } else {
@@ -1068,6 +1314,7 @@ public com.google.cloud.dataproc.v1beta2.InstanceGroupConfig buildPartial() {
         result.diskConfig_ = diskConfigBuilder_.build();
       }
       result.isPreemptible_ = isPreemptible_;
+      result.preemptibility_ = preemptibility_;
       if (managedGroupConfigBuilder_ == null) {
         result.managedGroupConfig_ = managedGroupConfig_;
       } else {
@@ -1160,6 +1407,9 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.InstanceGroupConfig o
       if (other.getIsPreemptible() != false) {
         setIsPreemptible(other.getIsPreemptible());
       }
+      if (other.preemptibility_ != 0) {
+        setPreemptibilityValue(other.getPreemptibilityValue());
+      }
       if (other.hasManagedGroupConfig()) {
         mergeManagedGroupConfig(other.getManagedGroupConfig());
       }
@@ -2029,6 +2279,132 @@ public Builder clearIsPreemptible() {
       return this;
     }
+    private int preemptibility_ = 0;
+    /**
+     *
+     *
+     *
+     * Optional. Specifies the preemptibility of the instance group.
+     * The default value for master and worker groups is
+     * `NON_PREEMPTIBLE`. This default cannot be changed.
+     * The default value for secondary instances is
+     * `PREEMPTIBLE`.
+     * 
+     *
+     *
+     * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+     *
+     *
+     * @return The enum numeric value on the wire for preemptibility.
+     */
+    @java.lang.Override
+    public int getPreemptibilityValue() {
+      return preemptibility_;
+    }
+    /**
+     *
+     *
+     *
+     * Optional. Specifies the preemptibility of the instance group.
+     * The default value for master and worker groups is
+     * `NON_PREEMPTIBLE`. This default cannot be changed.
+     * The default value for secondary instances is
+     * `PREEMPTIBLE`.
+     * 
+     *
+     *
+     * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+     *
+     *
+     * @param value The enum numeric value on the wire for preemptibility to set.
+     * @return This builder for chaining.
+     */
+    public Builder setPreemptibilityValue(int value) {
+
+      preemptibility_ = value;
+      onChanged();
+      return this;
+    }
+    /**
+     *
+     *
+     *
+     * Optional. Specifies the preemptibility of the instance group.
+     * The default value for master and worker groups is
+     * `NON_PREEMPTIBLE`. This default cannot be changed.
+     * The default value for secondary instances is
+     * `PREEMPTIBLE`.
+     * 
+     *
+     *
+     * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+     *
+     *
+     * @return The preemptibility.
+     */
+    @java.lang.Override
+    public com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility
+        getPreemptibility() {
+      @SuppressWarnings("deprecation")
+      com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility result =
+          com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility.valueOf(
+              preemptibility_);
+      return result == null
+          ? com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility.UNRECOGNIZED
+          : result;
+    }
+    /**
+     *
+     *
+     *
+     * Optional. Specifies the preemptibility of the instance group.
+     * The default value for master and worker groups is
+     * `NON_PREEMPTIBLE`. This default cannot be changed.
+     * The default value for secondary instances is
+     * `PREEMPTIBLE`.
+     * 
+     *
+     *
+     * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+     *
+     *
+     * @param value The preemptibility to set.
+     * @return This builder for chaining.
+     */
+    public Builder setPreemptibility(
+        com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility value) {
+      if (value == null) {
+        throw new NullPointerException();
+      }
+
+      preemptibility_ = value.getNumber();
+      onChanged();
+      return this;
+    }
+    /**
+     *
+     *
+     *
+     * Optional. Specifies the preemptibility of the instance group.
+     * The default value for master and worker groups is
+     * `NON_PREEMPTIBLE`. This default cannot be changed.
+     * The default value for secondary instances is
+     * `PREEMPTIBLE`.
+     * 
+     *
+     *
+     * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+     *
+     *
+     * @return This builder for chaining.
+     */
+    public Builder clearPreemptibility() {
+
+      preemptibility_ = 0;
+      onChanged();
+      return this;
+    }
+
     private com.google.cloud.dataproc.v1beta2.ManagedGroupConfig managedGroupConfig_;
     private com.google.protobuf.SingleFieldBuilderV3<
             com.google.cloud.dataproc.v1beta2.ManagedGroupConfig,
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfigOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfigOrBuilder.java
index ae533db6..ddc45b5c 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfigOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/InstanceGroupConfigOrBuilder.java
@@ -237,6 +237,43 @@ public interface InstanceGroupConfigOrBuilder
    */
   boolean getIsPreemptible();
+  /**
+   *
+   *
+   *
+   * Optional. Specifies the preemptibility of the instance group.
+   * The default value for master and worker groups is
+   * `NON_PREEMPTIBLE`. This default cannot be changed.
+   * The default value for secondary instances is
+   * `PREEMPTIBLE`.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   *
+   * @return The enum numeric value on the wire for preemptibility.
+   */
+  int getPreemptibilityValue();
+  /**
+   *
+   *
+   *
+   * Optional. Specifies the preemptibility of the instance group.
+   * The default value for master and worker groups is
+   * `NON_PREEMPTIBLE`. This default cannot be changed.
+   * The default value for secondary instances is
+   * `PREEMPTIBLE`.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility preemptibility = 10 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   *
+   * @return The preemptibility.
+   */
+  com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility getPreemptibility();
+
   /**
    *
    *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobReference.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobReference.java
index 61c31c8b..7aeb0ea3 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobReference.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobReference.java
@@ -125,11 +125,11 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    *
    *
    *
-   * Required. The ID of the Google Cloud Platform project that the job
-   * belongs to.
+   * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+   * specified, must match the request project ID.
    * 
    *
-   * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+   * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
    *
    * @return The projectId.
    */
@@ -149,11 +149,11 @@ public java.lang.String getProjectId() {
    *
    *
    *
-   * Required. The ID of the Google Cloud Platform project that the job
-   * belongs to.
+   * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+   * specified, must match the request project ID.
    * 
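With the annotation relaxed from REQUIRED to OPTIONAL, a JobReference may now carry only a job id and let the service infer the project, as sketched here with a made-up id:

    // Illustrative only: project_id omitted and defaulted by the service.
    JobReference reference =
        JobReference.newBuilder()
            .setJobId("word-count-20201228") // placeholder; also optional
            .build();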
    *
-   * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+   * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
    *
    * @return The bytes for projectId.
    */
@@ -562,11 +562,11 @@ public Builder mergeFrom(
     *
     *
     *
-     * Required. The ID of the Google Cloud Platform project that the job
-     * belongs to.
+     * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+     * specified, must match the request project ID.
      * 
     *
-     * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+     * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
     *
     * @return The projectId.
     */
@@ -585,11 +585,11 @@ public java.lang.String getProjectId() {
     *
     *
     *
-     * Required. The ID of the Google Cloud Platform project that the job
-     * belongs to.
+     * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+     * specified, must match the request project ID.
      * 
     *
-     * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+     * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
     *
     * @return The bytes for projectId.
     */
@@ -608,11 +608,11 @@ public com.google.protobuf.ByteString getProjectIdBytes() {
     *
     *
     *
-     * Required. The ID of the Google Cloud Platform project that the job
-     * belongs to.
+     * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+     * specified, must match the request project ID.
      * 
     *
-     * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+     * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
     *
     * @param value The projectId to set.
     * @return This builder for chaining.
     */
@@ -630,11 +630,11 @@ public Builder setProjectId(java.lang.String value) {
     *
     *
     *
-     * Required. The ID of the Google Cloud Platform project that the job
-     * belongs to.
+     * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+     * specified, must match the request project ID.
      * 
     *
-     * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+     * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
     *
     * @return This builder for chaining.
     */
@@ -648,11 +648,11 @@ public Builder clearProjectId() {
     *
     *
     *
-     * Required. The ID of the Google Cloud Platform project that the job
-     * belongs to.
+     * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+     * specified, must match the request project ID.
      * 
     *
-     * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+     * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
     *
     * @param value The bytes for projectId to set.
     * @return This builder for chaining.
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobReferenceOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobReferenceOrBuilder.java
index 46cbbf22..2bc8b9f8 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobReferenceOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobReferenceOrBuilder.java
@@ -27,11 +27,11 @@ public interface JobReferenceOrBuilder
    *
    *
    *
-   * Required. The ID of the Google Cloud Platform project that the job
-   * belongs to.
+   * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+   * specified, must match the request project ID.
    * 
    *
-   * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+   * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
    *
    * @return The projectId.
    */
@@ -40,11 +40,11 @@ public interface JobReferenceOrBuilder
    *
    *
    *
-   * Required. The ID of the Google Cloud Platform project that the job
-   * belongs to.
+   * Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+   * specified, must match the request project ID.
    * 
    *
-   * string project_id = 1 [(.google.api.field_behavior) = REQUIRED];
+   * string project_id = 1 [(.google.api.field_behavior) = OPTIONAL];
    *
    * @return The bytes for projectId.
    */
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobsProto.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobsProto.java
index 5060ff89..08562a06 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobsProto.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/JobsProto.java
@@ -299,7 +299,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
           + "DONE\020\005\022\t\n\005ERROR\020\006\022\023\n\017ATTEMPT_FAILURE\020\t\"H"
           + "\n\010Substate\022\017\n\013UNSPECIFIED\020\000\022\r\n\tSUBMITTED"
           + "\020\001\022\n\n\006QUEUED\020\002\022\020\n\014STALE_STATUS\020\003\"<\n\014JobR"
-          + "eference\022\027\n\nproject_id\030\001 \001(\tB\003\340A\002\022\023\n\006job"
+          + "eference\022\027\n\nproject_id\030\001 \001(\tB\003\340A\001\022\023\n\006job"
           + "_id\030\002 \001(\tB\003\340A\001\"\252\002\n\017YarnApplication\022\021\n\004na"
           + "me\030\001 \001(\tB\003\340A\003\022H\n\005state\030\002 \001(\01624.google.cl"
           + "oud.dataproc.v1beta2.YarnApplication.Sta"
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/KerberosConfig.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/KerberosConfig.java
index e05bf0af..bf98f046 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/KerberosConfig.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/KerberosConfig.java
@@ -223,8 +223,8 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    *
    *
    *
-   * Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set
-   * this field to true to enable Kerberos on a cluster.
+   * Optional. Flag to indicate whether to Kerberize the cluster (default:
+   * false). Set this field to true to enable Kerberos on a cluster.
    * 
    *
    * bool enable_kerberos = 1 [(.google.api.field_behavior) = OPTIONAL];
@@ -1488,8 +1488,8 @@ public Builder mergeFrom(
     *
     *
     *
-     * Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set
-     * this field to true to enable Kerberos on a cluster.
+     * Optional. Flag to indicate whether to Kerberize the cluster (default:
+     * false). Set this field to true to enable Kerberos on a cluster.
      * 
     *
     * bool enable_kerberos = 1 [(.google.api.field_behavior) = OPTIONAL];
@@ -1504,8 +1504,8 @@ public boolean getEnableKerberos() {
     *
     *
     *
-     * Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set
-     * this field to true to enable Kerberos on a cluster.
+     * Optional. Flag to indicate whether to Kerberize the cluster (default:
+     * false). Set this field to true to enable Kerberos on a cluster.
      * 
     *
     * bool enable_kerberos = 1 [(.google.api.field_behavior) = OPTIONAL];
@@ -1523,8 +1523,8 @@ public Builder setEnableKerberos(boolean value) {
     *
     *
     *
-     * Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set
-     * this field to true to enable Kerberos on a cluster.
+     * Optional. Flag to indicate whether to Kerberize the cluster (default:
+     * false). Set this field to true to enable Kerberos on a cluster.
      * 
     *
     * bool enable_kerberos = 1 [(.google.api.field_behavior) = OPTIONAL];
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/KerberosConfigOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/KerberosConfigOrBuilder.java
index a947cb29..4f30a890 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/KerberosConfigOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/KerberosConfigOrBuilder.java
@@ -27,8 +27,8 @@ public interface KerberosConfigOrBuilder
    *
    *
    *
-   * Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set
-   * this field to true to enable Kerberos on a cluster.
+   * Optional. Flag to indicate whether to Kerberize the cluster (default:
+   * false). Set this field to true to enable Kerberos on a cluster.
    * 
    *
    * bool enable_kerberos = 1 [(.google.api.field_behavior) = OPTIONAL];
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfig.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfig.java
index 9f24fef7..bbef5c86 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfig.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfig.java
@@ -275,7 +275,8 @@ public com.google.protobuf.DurationOrBuilder getIdleDeleteTtlOrBuilder() {
    *
    *
    *
-   * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+   * Optional. The time when cluster will be auto-deleted. (see JSON
+   * representation of
    * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
    * 
    *
@@ -293,7 +294,8 @@ public boolean hasAutoDeleteTime() {
    *
    *
    *
-   * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+   * Optional. The time when cluster will be auto-deleted. (see JSON
+   * representation of
    * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
    * 
    *
@@ -314,7 +316,8 @@ public com.google.protobuf.Timestamp getAutoDeleteTime() {
    *
    *
    *
-   * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+   * Optional. The time when cluster will be auto-deleted. (see JSON
+   * representation of
    * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
    * 
    *
@@ -1148,7 +1151,8 @@ public com.google.protobuf.DurationOrBuilder getIdleDeleteTtlOrBuilder() {
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
@@ -1166,7 +1170,8 @@ public boolean hasAutoDeleteTime() {
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
@@ -1194,7 +1199,8 @@ public com.google.protobuf.Timestamp getAutoDeleteTime() {
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
@@ -1219,7 +1225,8 @@ public Builder setAutoDeleteTime(com.google.protobuf.Timestamp value) {
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
@@ -1241,7 +1248,8 @@ public Builder setAutoDeleteTime(com.google.protobuf.Timestamp.Builder builderFo
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
@@ -1273,7 +1281,8 @@ public Builder mergeAutoDeleteTime(com.google.protobuf.Timestamp value) {
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
@@ -1301,7 +1310,8 @@ public Builder clearAutoDeleteTime() {
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
@@ -1316,7 +1326,8 @@ public com.google.protobuf.Timestamp.Builder getAutoDeleteTimeBuilder() {
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
@@ -1339,7 +1350,8 @@ public com.google.protobuf.TimestampOrBuilder getAutoDeleteTimeOrBuilder() {
     *
     *
     *
-     * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+     * Optional. The time when cluster will be auto-deleted. (see JSON
+     * representation of
      * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
      * 
     *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfigOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfigOrBuilder.java
index 047450d6..65d5a5b5 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfigOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LifecycleConfigOrBuilder.java
@@ -77,7 +77,8 @@ public interface LifecycleConfigOrBuilder
    *
    *
    *
-   * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+   * Optional. The time when cluster will be auto-deleted. (see JSON
+   * representation of
    * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
    * 
    *
@@ -92,7 +93,8 @@ public interface LifecycleConfigOrBuilder
    *
    *
    *
-   * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+   * Optional. The time when cluster will be auto-deleted. (see JSON
+   * representation of
    * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
    * 
    *
@@ -107,7 +109,8 @@ public interface LifecycleConfigOrBuilder
    *
    *
    *
-   * Optional. The time when cluster will be auto-deleted. (see JSON representation of
+   * Optional. The time when cluster will be auto-deleted. (see JSON
+   * representation of
    * [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
    * 
    *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LocationName.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LocationName.java
index 84ba3611..59696061 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LocationName.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/LocationName.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * https://www.apache.org/licenses/LICENSE-2.0
+ *     https://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -23,19 +23,29 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
+import javax.annotation.Generated;
 
-/** AUTO-GENERATED DOCUMENTATION AND CLASS */
-@javax.annotation.Generated("by GAPIC protoc plugin")
+// AUTO-GENERATED DOCUMENTATION AND CLASS.
+@Generated("by gapic-generator-java")
 public class LocationName implements ResourceName {
-
-  private static final PathTemplate PATH_TEMPLATE =
+  private static final PathTemplate PROJECT_LOCATION =
       PathTemplate.createWithoutUrlEncoding("projects/{project}/locations/{location}");
-
   private volatile Map<String, String> fieldValuesMap;
-
   private final String project;
   private final String location;
 
+  @Deprecated
+  protected LocationName() {
+    project = null;
+    location = null;
+  }
+
+  private LocationName(Builder builder) {
+    project = Preconditions.checkNotNull(builder.getProject());
+    location = Preconditions.checkNotNull(builder.getLocation());
+  }
+
   public String getProject() {
     return project;
   }
@@ -52,11 +62,6 @@ public Builder toBuilder() {
     return new Builder(this);
   }
 
-  private LocationName(Builder builder) {
-    project = Preconditions.checkNotNull(builder.getProject());
-    location = Preconditions.checkNotNull(builder.getLocation());
-  }
-
   public static LocationName of(String project, String location) {
     return newBuilder().setProject(project).setLocation(location).build();
   }
@@ -70,7 +75,7 @@ public static LocationName parse(String formattedString) {
       return null;
     }
     Map<String, String> matchMap =
-        PATH_TEMPLATE.validatedMatch(
+        PROJECT_LOCATION.validatedMatch(
             formattedString, "LocationName.parse: formattedString not in valid format");
     return of(matchMap.get("project"), matchMap.get("location"));
   }
@@ -84,7 +89,7 @@ public static List<LocationName> parseList(List<String> formattedStrings) {
   }
 
   public static List<String> toStringList(List<LocationName> values) {
-    List<String> list = new ArrayList(values.size());
+    List<String> list = new ArrayList<>(values.size());
     for (LocationName value : values) {
       if (value == null) {
         list.add("");
@@ -96,16 +101,21 @@ public static List<String> toStringList(List<LocationName> values) {
   }
 
   public static boolean isParsableFrom(String formattedString) {
-    return PATH_TEMPLATE.matches(formattedString);
+    return PROJECT_LOCATION.matches(formattedString);
   }
 
+  @Override
   public Map<String, String> getFieldValuesMap() {
     if (fieldValuesMap == null) {
       synchronized (this) {
         if (fieldValuesMap == null) {
           ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
-          fieldMapBuilder.put("project", project);
-          fieldMapBuilder.put("location", location);
+          if (project != null) {
+            fieldMapBuilder.put("project", project);
+          }
+          if (location != null) {
+            fieldMapBuilder.put("location", location);
+          }
           fieldValuesMap = fieldMapBuilder.build();
         }
       }
@@ -119,15 +129,39 @@ public String getFieldValue(String fieldName) {
 
   @Override
   public String toString() {
-    return PATH_TEMPLATE.instantiate("project", project, "location", location);
+    return PROJECT_LOCATION.instantiate("project", project, "location", location);
   }
 
-  /** Builder for LocationName. */
-  public static class Builder {
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (o != null || getClass() == o.getClass()) {
+      LocationName that = ((LocationName) o);
+      return Objects.equals(this.project, that.project)
+          && Objects.equals(this.location, that.location);
+    }
+    return false;
+  }
 
+  @Override
+  public int hashCode() {
+    int h = 1;
+    h *= 1000003;
+    h ^= Objects.hashCode(project);
+    h *= 1000003;
+    h ^= Objects.hashCode(location);
+    return h;
+  }
+
+  /** Builder for projects/{project}/locations/{location}. */
+  public static class Builder {
     private String project;
     private String location;
 
+    protected Builder() {}
+
     public String getProject() {
       return project;
     }
@@ -146,8 +180,6 @@ public Builder setLocation(String location) {
       return this;
     }
 
-    private Builder() {}
-
     private Builder(LocationName locationName) {
       project = locationName.project;
       location = locationName.location;
@@ -157,26 +189,4 @@ public LocationName build() {
       return new LocationName(this);
     }
   }
-
-  @Override
-  public boolean equals(Object o) {
-    if (o == this) {
-      return true;
-    }
-    if (o instanceof LocationName) {
-      LocationName that = (LocationName) o;
-      return (this.project.equals(that.project)) && (this.location.equals(that.location));
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode() {
-    int h = 1;
-    h *= 1000003;
-    h ^= project.hashCode();
-    h *= 1000003;
-    h ^= location.hashCode();
-    return h;
-  }
 }
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/OrderedJob.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/OrderedJob.java
index 6cde60d6..2ee2783a 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/OrderedJob.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/OrderedJob.java
@@ -369,8 +369,8 @@ public JobTypeCase getJobTypeCase() {
    * within the template.
    * The step id is used as prefix for job id, as job
    * `goog-dataproc-workflow-step-id` label, and in
-   * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other
-   * steps.
+   * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids]
+   * field from other steps.
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
    * underscores (_), and hyphens (-). Cannot begin or end with underscore
    * or hyphen. Must consist of between 3 and 50 characters.
@@ -400,8 +400,8 @@ public java.lang.String getStepId() {
    * within the template.
    * The step id is used as prefix for job id, as job
    * `goog-dataproc-workflow-step-id` label, and in
-   * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other
-   * steps.
+   * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids]
+   * field from other steps.
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
    * underscores (_), and hyphens (-). Cannot begin or end with underscore
    * or hyphen. Must consist of between 3 and 50 characters.
@@ -426,7 +426,15 @@ public com.google.protobuf.ByteString getStepIdBytes() {
   public static final int HADOOP_JOB_FIELD_NUMBER = 2;
   /**
-   * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2;
+   *
+   *
+   *
+   * Optional. Job is a Hadoop job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return Whether the hadoopJob field is set.
    */
@@ -435,7 +443,15 @@ public boolean hasHadoopJob() {
     return jobTypeCase_ == 2;
   }
   /**
-   * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2;
+   *
+   *
+   *
+   * Optional. Job is a Hadoop job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return The hadoopJob.
    */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.HadoopJob getHadoopJob() {
    if (jobTypeCase_ == 2) {
      return (com.google.cloud.dataproc.v1beta2.HadoopJob) jobType_;
    }
    return com.google.cloud.dataproc.v1beta2.HadoopJob.getDefaultInstance();
  }
-  /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */
+  /**
+   *
+   *
+   *
+   * Optional. Job is a Hadoop job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.HadoopJobOrBuilder getHadoopJobOrBuilder() {
    if (jobTypeCase_ == 2) {
@@ -457,7 +483,15 @@ com.google.cloud.dataproc.v1beta2.HadoopJobOrBuilder getHadoopJobOrBuilde
   public static final int SPARK_JOB_FIELD_NUMBER = 3;
   /**
-   * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3;
+   *
+   *
+   *
+   * Optional. Job is a Spark job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return Whether the sparkJob field is set.
    */
@@ -466,7 +500,15 @@ public boolean hasSparkJob() {
     return jobTypeCase_ == 3;
   }
   /**
-   * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3;
+   *
+   *
+   *
+   * Optional. Job is a Spark job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return The sparkJob.
    */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.SparkJob getSparkJob() {
    if (jobTypeCase_ == 3) {
      return (com.google.cloud.dataproc.v1beta2.SparkJob) jobType_;
    }
    return com.google.cloud.dataproc.v1beta2.SparkJob.getDefaultInstance();
  }
-  /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */
+  /**
+   *
+   *
+   *
+   * Optional. Job is a Spark job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.SparkJobOrBuilder getSparkJobOrBuilder() {
    if (jobTypeCase_ == 3) {
@@ -488,7 +540,15 @@ com.google.cloud.dataproc.v1beta2.SparkJobOrBuilder getSparkJobOrBuilder(
   public static final int PYSPARK_JOB_FIELD_NUMBER = 4;
   /**
-   * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4;
+   *
+   *
+   *
+   * Optional. Job is a PySpark job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return Whether the pysparkJob field is set.
    */
@@ -497,7 +557,15 @@ public boolean hasPysparkJob() {
     return jobTypeCase_ == 4;
   }
   /**
-   * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4;
+   *
+   *
+   *
+   * Optional. Job is a PySpark job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return The pysparkJob.
    */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.PySparkJob getPysparkJob() {
    if (jobTypeCase_ == 4) {
      return (com.google.cloud.dataproc.v1beta2.PySparkJob) jobType_;
    }
    return com.google.cloud.dataproc.v1beta2.PySparkJob.getDefaultInstance();
  }
-  /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */
+  /**
+   *
+   *
+   *
+   * Optional. Job is a PySpark job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.PySparkJobOrBuilder getPysparkJobOrBuilder() {
    if (jobTypeCase_ == 4) {
@@ -519,7 +597,15 @@ com.google.cloud.dataproc.v1beta2.PySparkJobOrBuilder getPysparkJobOrBuil
   public static final int HIVE_JOB_FIELD_NUMBER = 5;
   /**
-   * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5;
+   *
+   *
+   *
+   * Optional. Job is a Hive job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return Whether the hiveJob field is set.
    */
@@ -528,7 +614,15 @@ public boolean hasHiveJob() {
     return jobTypeCase_ == 5;
   }
   /**
-   * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5;
+   *
+   *
+   *
+   * Optional. Job is a Hive job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return The hiveJob.
    */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.HiveJob getHiveJob() {
    if (jobTypeCase_ == 5) {
      return (com.google.cloud.dataproc.v1beta2.HiveJob) jobType_;
    }
    return com.google.cloud.dataproc.v1beta2.HiveJob.getDefaultInstance();
  }
-  /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */
+  /**
+   *
+   *
+   *
+   * Optional. Job is a Hive job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.HiveJobOrBuilder getHiveJobOrBuilder() {
    if (jobTypeCase_ == 5) {
@@ -550,7 +654,15 @@ com.google.cloud.dataproc.v1beta2.HiveJobOrBuilder getHiveJobOrBuilder()
   public static final int PIG_JOB_FIELD_NUMBER = 6;
   /**
-   * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6;
+   *
+   *
+   *
+   * Optional. Job is a Pig job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return Whether the pigJob field is set.
    */
@@ -559,7 +671,15 @@ public boolean hasPigJob() {
     return jobTypeCase_ == 6;
   }
   /**
-   * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6;
+   *
+   *
+   *
+   * Optional. Job is a Pig job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return The pigJob.
    */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.PigJob getPigJob() {
    if (jobTypeCase_ == 6) {
      return (com.google.cloud.dataproc.v1beta2.PigJob) jobType_;
    }
    return com.google.cloud.dataproc.v1beta2.PigJob.getDefaultInstance();
  }
-  /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */
+  /**
+   *
+   *
+   *
+   * Optional. Job is a Pig job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.PigJobOrBuilder getPigJobOrBuilder() {
    if (jobTypeCase_ == 6) {
@@ -584,10 +714,12 @@ public com.google.cloud.dataproc.v1beta2.PigJobOrBuilder getPigJobOrBuilder() {
    *
    *
    *
-   * Spark R job
+   * Optional. Job is a SparkR job.
    * 
    *
-   * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11;
+   *
+   * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return Whether the sparkRJob field is set.
    */
@@ -599,10 +731,12 @@ public boolean hasSparkRJob() {
    *
    *
    *
-   * Spark R job
+   * Optional. Job is a SparkR job.
    * 
    *
-   * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11;
+   *
+   * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return The sparkRJob.
    */
@@ -617,10 +751,12 @@ public com.google.cloud.dataproc.v1beta2.SparkRJob getSparkRJob() {
    *
    *
    *
-   * Spark R job
+   * Optional. Job is a SparkR job.
    * 
    *
-   * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11;
+   *
+   * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
+   *
    */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.SparkRJobOrBuilder getSparkRJobOrBuilder() {
@@ -632,7 +768,15 @@ com.google.cloud.dataproc.v1beta2.SparkRJobOrBuilder getSparkRJobOrBuilde
   public static final int SPARK_SQL_JOB_FIELD_NUMBER = 7;
   /**
-   * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7;
+   *
+   *
+   *
+   * Optional. Job is a SparkSql job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return Whether the sparkSqlJob field is set.
    */
@@ -641,7 +785,15 @@ public boolean hasSparkSqlJob() {
     return jobTypeCase_ == 7;
   }
   /**
-   * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7;
+   *
+   *
+   *
+   * Optional. Job is a SparkSql job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
+   *
    *
    * @return The sparkSqlJob.
    */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.SparkSqlJob getSparkSqlJob() {
    if (jobTypeCase_ == 7) {
      return (com.google.cloud.dataproc.v1beta2.SparkSqlJob) jobType_;
    }
    return com.google.cloud.dataproc.v1beta2.SparkSqlJob.getDefaultInstance();
  }
-  /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */
+  /**
+   *
+   *
+   *
+   * Optional. Job is a SparkSql job.
+   * 
+   *
+   *
+   * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
+   *
+   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1beta2.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() {
    if (jobTypeCase_ == 7) {
@@ -666,10 +828,12 @@ public com.google.cloud.dataproc.v1beta2.SparkSqlJobOrBuilder getSparkSqlJobOrBu
    *
    *
    *
-   * Presto job
+   * Optional. Job is a Presto job.
    *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the prestoJob field is set. */ @@ -681,10 +845,12 @@ public boolean hasPrestoJob() { * * *
-   * Presto job
+   * Optional. Job is a Presto job.
    *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The prestoJob. */ @@ -699,10 +865,12 @@ public com.google.cloud.dataproc.v1beta2.PrestoJob getPrestoJob() { * * *
-   * Presto job
+   * Optional. Job is a Presto job.
    *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.PrestoJobOrBuilder getPrestoJobOrBuilder() { @@ -1636,8 +1804,8 @@ public Builder clearJobType() { * within the template. * The step id is used as prefix for job id, as job * `goog-dataproc-workflow-step-id` label, and in - * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - * steps. + * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + * field from other steps. * The id must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). Cannot begin or end with underscore * or hyphen. Must consist of between 3 and 50 characters. @@ -1666,8 +1834,8 @@ public java.lang.String getStepId() { * within the template. * The step id is used as prefix for job id, as job * `goog-dataproc-workflow-step-id` label, and in - * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - * steps. + * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + * field from other steps. * The id must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). Cannot begin or end with underscore * or hyphen. Must consist of between 3 and 50 characters. @@ -1696,8 +1864,8 @@ public com.google.protobuf.ByteString getStepIdBytes() { * within the template. * The step id is used as prefix for job id, as job * `goog-dataproc-workflow-step-id` label, and in - * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - * steps. + * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + * field from other steps. * The id must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). Cannot begin or end with underscore * or hyphen. Must consist of between 3 and 50 characters. @@ -1725,8 +1893,8 @@ public Builder setStepId(java.lang.String value) { * within the template. * The step id is used as prefix for job id, as job * `goog-dataproc-workflow-step-id` label, and in - * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - * steps. + * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + * field from other steps. * The id must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). Cannot begin or end with underscore * or hyphen. Must consist of between 3 and 50 characters. @@ -1750,8 +1918,8 @@ public Builder clearStepId() { * within the template. * The step id is used as prefix for job id, as job * `goog-dataproc-workflow-step-id` label, and in - * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - * steps. + * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + * field from other steps. * The id must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). Cannot begin or end with underscore * or hyphen. Must consist of between 3 and 50 characters. 
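Note: the hunks above and below document OrderedJob's job_type oneof (hadoop_job, spark_job, pyspark_job, hive_job, pig_job, spark_r_job, spark_sql_job, presto_job) and the step_id naming rules. As a minimal usage sketch of the generated builder API, not part of the patch itself (the step id and gs:// URI below are hypothetical):

import com.google.cloud.dataproc.v1beta2.OrderedJob;
import com.google.cloud.dataproc.v1beta2.PySparkJob;

public class OrderedJobSketch {
  public static void main(String[] args) {
    // step_id: 3-50 chars of letters, digits, underscores, and hyphens;
    // it must not begin or end with an underscore or hyphen.
    OrderedJob job =
        OrderedJob.newBuilder()
            .setStepId("ingest-step-1") // hypothetical step id
            .setPysparkJob(
                PySparkJob.newBuilder()
                    .setMainPythonFileUri("gs://my-bucket/ingest.py") // hypothetical URI
                    .build())
            .build();

    // job_type is a proto oneof: setting one job type clears the others.
    System.out.println(job.hasPysparkJob()); // true
    System.out.println(job.hasHadoopJob());  // false
  }
}

Because job_type is a oneof, calling a second setter such as setHadoopJob(...) on the same builder would replace the PySpark job rather than add to it.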
@@ -1779,7 +1947,15 @@ public Builder setStepIdBytes(com.google.protobuf.ByteString value) { com.google.cloud.dataproc.v1beta2.HadoopJobOrBuilder> hadoopJobBuilder_; /** - * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the hadoopJob field is set. */ @@ -1788,7 +1964,15 @@ public boolean hasHadoopJob() { return jobTypeCase_ == 2; } /** - * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The hadoopJob. */ @@ -1806,7 +1990,17 @@ public com.google.cloud.dataproc.v1beta2.HadoopJob getHadoopJob() { return com.google.cloud.dataproc.v1beta2.HadoopJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */ + /** + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setHadoopJob(com.google.cloud.dataproc.v1beta2.HadoopJob value) { if (hadoopJobBuilder_ == null) { if (value == null) { @@ -1820,7 +2014,17 @@ public Builder setHadoopJob(com.google.cloud.dataproc.v1beta2.HadoopJob value) { jobTypeCase_ = 2; return this; } - /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */ + /** + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setHadoopJob( com.google.cloud.dataproc.v1beta2.HadoopJob.Builder builderForValue) { if (hadoopJobBuilder_ == null) { @@ -1832,7 +2036,17 @@ public Builder setHadoopJob( jobTypeCase_ = 2; return this; } - /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */ + /** + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder mergeHadoopJob(com.google.cloud.dataproc.v1beta2.HadoopJob value) { if (hadoopJobBuilder_ == null) { if (jobTypeCase_ == 2 @@ -1855,7 +2069,17 @@ public Builder mergeHadoopJob(com.google.cloud.dataproc.v1beta2.HadoopJob value) jobTypeCase_ = 2; return this; } - /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */ + /** + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder clearHadoopJob() { if (hadoopJobBuilder_ == null) { if (jobTypeCase_ == 2) { @@ -1872,11 +2096,31 @@ public Builder clearHadoopJob() { } return this; } - /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */ + /** + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public com.google.cloud.dataproc.v1beta2.HadoopJob.Builder getHadoopJobBuilder() { return getHadoopJobFieldBuilder().getBuilder(); } - /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */ + /** + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.HadoopJobOrBuilder getHadoopJobOrBuilder() { if ((jobTypeCase_ == 2) && (hadoopJobBuilder_ != null)) { @@ -1888,7 +2132,17 @@ public com.google.cloud.dataproc.v1beta2.HadoopJobOrBuilder getHadoopJobOrBuilde return com.google.cloud.dataproc.v1beta2.HadoopJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */ + /** + * + * + *
+     * Optional. Job is a Hadoop job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataproc.v1beta2.HadoopJob, com.google.cloud.dataproc.v1beta2.HadoopJob.Builder, @@ -1920,7 +2174,15 @@ public com.google.cloud.dataproc.v1beta2.HadoopJobOrBuilder getHadoopJobOrBuilde com.google.cloud.dataproc.v1beta2.SparkJobOrBuilder> sparkJobBuilder_; /** - * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the sparkJob field is set. */ @@ -1929,7 +2191,15 @@ public boolean hasSparkJob() { return jobTypeCase_ == 3; } /** - * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The sparkJob. */ @@ -1947,7 +2217,17 @@ public com.google.cloud.dataproc.v1beta2.SparkJob getSparkJob() { return com.google.cloud.dataproc.v1beta2.SparkJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */ + /** + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setSparkJob(com.google.cloud.dataproc.v1beta2.SparkJob value) { if (sparkJobBuilder_ == null) { if (value == null) { @@ -1961,18 +2241,38 @@ public Builder setSparkJob(com.google.cloud.dataproc.v1beta2.SparkJob value) { jobTypeCase_ = 3; return this; } - /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */ - public Builder setSparkJob(com.google.cloud.dataproc.v1beta2.SparkJob.Builder builderForValue) { - if (sparkJobBuilder_ == null) { - jobType_ = builderForValue.build(); - onChanged(); - } else { - sparkJobBuilder_.setMessage(builderForValue.build()); + /** + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setSparkJob(com.google.cloud.dataproc.v1beta2.SparkJob.Builder builderForValue) { + if (sparkJobBuilder_ == null) { + jobType_ = builderForValue.build(); + onChanged(); + } else { + sparkJobBuilder_.setMessage(builderForValue.build()); } jobTypeCase_ = 3; return this; } - /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */ + /** + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder mergeSparkJob(com.google.cloud.dataproc.v1beta2.SparkJob value) { if (sparkJobBuilder_ == null) { if (jobTypeCase_ == 3 @@ -1995,7 +2295,17 @@ public Builder mergeSparkJob(com.google.cloud.dataproc.v1beta2.SparkJob value) { jobTypeCase_ = 3; return this; } - /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */ + /** + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder clearSparkJob() { if (sparkJobBuilder_ == null) { if (jobTypeCase_ == 3) { @@ -2012,11 +2322,31 @@ public Builder clearSparkJob() { } return this; } - /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */ + /** + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public com.google.cloud.dataproc.v1beta2.SparkJob.Builder getSparkJobBuilder() { return getSparkJobFieldBuilder().getBuilder(); } - /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */ + /** + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.SparkJobOrBuilder getSparkJobOrBuilder() { if ((jobTypeCase_ == 3) && (sparkJobBuilder_ != null)) { @@ -2028,7 +2358,17 @@ public com.google.cloud.dataproc.v1beta2.SparkJobOrBuilder getSparkJobOrBuilder( return com.google.cloud.dataproc.v1beta2.SparkJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */ + /** + * + * + *
+     * Optional. Job is a Spark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataproc.v1beta2.SparkJob, com.google.cloud.dataproc.v1beta2.SparkJob.Builder, @@ -2060,7 +2400,15 @@ public com.google.cloud.dataproc.v1beta2.SparkJobOrBuilder getSparkJobOrBuilder( com.google.cloud.dataproc.v1beta2.PySparkJobOrBuilder> pysparkJobBuilder_; /** - * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the pysparkJob field is set. */ @@ -2069,7 +2417,15 @@ public boolean hasPysparkJob() { return jobTypeCase_ == 4; } /** - * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The pysparkJob. */ @@ -2087,7 +2443,17 @@ public com.google.cloud.dataproc.v1beta2.PySparkJob getPysparkJob() { return com.google.cloud.dataproc.v1beta2.PySparkJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */ + /** + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setPysparkJob(com.google.cloud.dataproc.v1beta2.PySparkJob value) { if (pysparkJobBuilder_ == null) { if (value == null) { @@ -2101,7 +2467,17 @@ public Builder setPysparkJob(com.google.cloud.dataproc.v1beta2.PySparkJob value) jobTypeCase_ = 4; return this; } - /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */ + /** + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setPysparkJob( com.google.cloud.dataproc.v1beta2.PySparkJob.Builder builderForValue) { if (pysparkJobBuilder_ == null) { @@ -2113,7 +2489,17 @@ public Builder setPysparkJob( jobTypeCase_ = 4; return this; } - /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */ + /** + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder mergePysparkJob(com.google.cloud.dataproc.v1beta2.PySparkJob value) { if (pysparkJobBuilder_ == null) { if (jobTypeCase_ == 4 @@ -2136,7 +2522,17 @@ public Builder mergePysparkJob(com.google.cloud.dataproc.v1beta2.PySparkJob valu jobTypeCase_ = 4; return this; } - /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */ + /** + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder clearPysparkJob() { if (pysparkJobBuilder_ == null) { if (jobTypeCase_ == 4) { @@ -2153,11 +2549,31 @@ public Builder clearPysparkJob() { } return this; } - /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */ + /** + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public com.google.cloud.dataproc.v1beta2.PySparkJob.Builder getPysparkJobBuilder() { return getPysparkJobFieldBuilder().getBuilder(); } - /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */ + /** + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.PySparkJobOrBuilder getPysparkJobOrBuilder() { if ((jobTypeCase_ == 4) && (pysparkJobBuilder_ != null)) { @@ -2169,7 +2585,17 @@ public com.google.cloud.dataproc.v1beta2.PySparkJobOrBuilder getPysparkJobOrBuil return com.google.cloud.dataproc.v1beta2.PySparkJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */ + /** + * + * + *
+     * Optional. Job is a PySpark job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataproc.v1beta2.PySparkJob, com.google.cloud.dataproc.v1beta2.PySparkJob.Builder, @@ -2201,7 +2627,15 @@ public com.google.cloud.dataproc.v1beta2.PySparkJobOrBuilder getPysparkJobOrBuil com.google.cloud.dataproc.v1beta2.HiveJobOrBuilder> hiveJobBuilder_; /** - * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the hiveJob field is set. */ @@ -2210,7 +2644,15 @@ public boolean hasHiveJob() { return jobTypeCase_ == 5; } /** - * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The hiveJob. */ @@ -2228,7 +2670,17 @@ public com.google.cloud.dataproc.v1beta2.HiveJob getHiveJob() { return com.google.cloud.dataproc.v1beta2.HiveJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */ + /** + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setHiveJob(com.google.cloud.dataproc.v1beta2.HiveJob value) { if (hiveJobBuilder_ == null) { if (value == null) { @@ -2242,7 +2694,17 @@ public Builder setHiveJob(com.google.cloud.dataproc.v1beta2.HiveJob value) { jobTypeCase_ = 5; return this; } - /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */ + /** + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setHiveJob(com.google.cloud.dataproc.v1beta2.HiveJob.Builder builderForValue) { if (hiveJobBuilder_ == null) { jobType_ = builderForValue.build(); @@ -2253,7 +2715,17 @@ public Builder setHiveJob(com.google.cloud.dataproc.v1beta2.HiveJob.Builder buil jobTypeCase_ = 5; return this; } - /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */ + /** + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder mergeHiveJob(com.google.cloud.dataproc.v1beta2.HiveJob value) { if (hiveJobBuilder_ == null) { if (jobTypeCase_ == 5 @@ -2276,7 +2748,17 @@ public Builder mergeHiveJob(com.google.cloud.dataproc.v1beta2.HiveJob value) { jobTypeCase_ = 5; return this; } - /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */ + /** + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder clearHiveJob() { if (hiveJobBuilder_ == null) { if (jobTypeCase_ == 5) { @@ -2293,11 +2775,31 @@ public Builder clearHiveJob() { } return this; } - /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */ + /** + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public com.google.cloud.dataproc.v1beta2.HiveJob.Builder getHiveJobBuilder() { return getHiveJobFieldBuilder().getBuilder(); } - /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */ + /** + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.HiveJobOrBuilder getHiveJobOrBuilder() { if ((jobTypeCase_ == 5) && (hiveJobBuilder_ != null)) { @@ -2309,7 +2811,17 @@ public com.google.cloud.dataproc.v1beta2.HiveJobOrBuilder getHiveJobOrBuilder() return com.google.cloud.dataproc.v1beta2.HiveJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */ + /** + * + * + *
+     * Optional. Job is a Hive job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataproc.v1beta2.HiveJob, com.google.cloud.dataproc.v1beta2.HiveJob.Builder, @@ -2341,7 +2853,15 @@ public com.google.cloud.dataproc.v1beta2.HiveJobOrBuilder getHiveJobOrBuilder() com.google.cloud.dataproc.v1beta2.PigJobOrBuilder> pigJobBuilder_; /** - * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the pigJob field is set. */ @@ -2350,7 +2870,15 @@ public boolean hasPigJob() { return jobTypeCase_ == 6; } /** - * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The pigJob. */ @@ -2368,7 +2896,17 @@ public com.google.cloud.dataproc.v1beta2.PigJob getPigJob() { return com.google.cloud.dataproc.v1beta2.PigJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */ + /** + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setPigJob(com.google.cloud.dataproc.v1beta2.PigJob value) { if (pigJobBuilder_ == null) { if (value == null) { @@ -2382,7 +2920,17 @@ public Builder setPigJob(com.google.cloud.dataproc.v1beta2.PigJob value) { jobTypeCase_ = 6; return this; } - /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */ + /** + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setPigJob(com.google.cloud.dataproc.v1beta2.PigJob.Builder builderForValue) { if (pigJobBuilder_ == null) { jobType_ = builderForValue.build(); @@ -2393,7 +2941,17 @@ public Builder setPigJob(com.google.cloud.dataproc.v1beta2.PigJob.Builder builde jobTypeCase_ = 6; return this; } - /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */ + /** + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder mergePigJob(com.google.cloud.dataproc.v1beta2.PigJob value) { if (pigJobBuilder_ == null) { if (jobTypeCase_ == 6 @@ -2416,7 +2974,17 @@ public Builder mergePigJob(com.google.cloud.dataproc.v1beta2.PigJob value) { jobTypeCase_ = 6; return this; } - /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */ + /** + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder clearPigJob() { if (pigJobBuilder_ == null) { if (jobTypeCase_ == 6) { @@ -2433,11 +3001,31 @@ public Builder clearPigJob() { } return this; } - /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */ + /** + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public com.google.cloud.dataproc.v1beta2.PigJob.Builder getPigJobBuilder() { return getPigJobFieldBuilder().getBuilder(); } - /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */ + /** + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * + */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.PigJobOrBuilder getPigJobOrBuilder() { if ((jobTypeCase_ == 6) && (pigJobBuilder_ != null)) { @@ -2449,7 +3037,17 @@ public com.google.cloud.dataproc.v1beta2.PigJobOrBuilder getPigJobOrBuilder() { return com.google.cloud.dataproc.v1beta2.PigJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */ + /** + * + * + *
+     * Optional. Job is a Pig job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * + */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataproc.v1beta2.PigJob, com.google.cloud.dataproc.v1beta2.PigJob.Builder, @@ -2484,10 +3082,12 @@ public com.google.cloud.dataproc.v1beta2.PigJobOrBuilder getPigJobOrBuilder() { * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the sparkRJob field is set. */ @@ -2499,10 +3099,12 @@ public boolean hasSparkRJob() { * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The sparkRJob. */ @@ -2524,10 +3126,12 @@ public com.google.cloud.dataproc.v1beta2.SparkRJob getSparkRJob() { * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * */ public Builder setSparkRJob(com.google.cloud.dataproc.v1beta2.SparkRJob value) { if (sparkRJobBuilder_ == null) { @@ -2546,10 +3150,12 @@ public Builder setSparkRJob(com.google.cloud.dataproc.v1beta2.SparkRJob value) { * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * */ public Builder setSparkRJob( com.google.cloud.dataproc.v1beta2.SparkRJob.Builder builderForValue) { @@ -2566,10 +3172,12 @@ public Builder setSparkRJob( * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * */ public Builder mergeSparkRJob(com.google.cloud.dataproc.v1beta2.SparkRJob value) { if (sparkRJobBuilder_ == null) { @@ -2597,10 +3205,12 @@ public Builder mergeSparkRJob(com.google.cloud.dataproc.v1beta2.SparkRJob value) * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * */ public Builder clearSparkRJob() { if (sparkRJobBuilder_ == null) { @@ -2622,10 +3232,12 @@ public Builder clearSparkRJob() { * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * */ public com.google.cloud.dataproc.v1beta2.SparkRJob.Builder getSparkRJobBuilder() { return getSparkRJobFieldBuilder().getBuilder(); @@ -2634,10 +3246,12 @@ public com.google.cloud.dataproc.v1beta2.SparkRJob.Builder getSparkRJobBuilder() * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.SparkRJobOrBuilder getSparkRJobOrBuilder() { @@ -2654,10 +3268,12 @@ public com.google.cloud.dataproc.v1beta2.SparkRJobOrBuilder getSparkRJobOrBuilde * * *
-     * Spark R job
+     * Optional. Job is a SparkR job.
      *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataproc.v1beta2.SparkRJob, @@ -2690,7 +3306,15 @@ public com.google.cloud.dataproc.v1beta2.SparkRJobOrBuilder getSparkRJobOrBuilde com.google.cloud.dataproc.v1beta2.SparkSqlJobOrBuilder> sparkSqlJobBuilder_; /** - * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the sparkSqlJob field is set. */ @@ -2699,7 +3323,15 @@ public boolean hasSparkSqlJob() { return jobTypeCase_ == 7; } /** - * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The sparkSqlJob. */ @@ -2717,7 +3349,17 @@ public com.google.cloud.dataproc.v1beta2.SparkSqlJob getSparkSqlJob() { return com.google.cloud.dataproc.v1beta2.SparkSqlJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */ + /** + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setSparkSqlJob(com.google.cloud.dataproc.v1beta2.SparkSqlJob value) { if (sparkSqlJobBuilder_ == null) { if (value == null) { @@ -2731,7 +3373,17 @@ public Builder setSparkSqlJob(com.google.cloud.dataproc.v1beta2.SparkSqlJob valu jobTypeCase_ = 7; return this; } - /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */ + /** + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder setSparkSqlJob( com.google.cloud.dataproc.v1beta2.SparkSqlJob.Builder builderForValue) { if (sparkSqlJobBuilder_ == null) { @@ -2743,7 +3395,17 @@ public Builder setSparkSqlJob( jobTypeCase_ = 7; return this; } - /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */ + /** + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder mergeSparkSqlJob(com.google.cloud.dataproc.v1beta2.SparkSqlJob value) { if (sparkSqlJobBuilder_ == null) { if (jobTypeCase_ == 7 @@ -2766,7 +3428,17 @@ public Builder mergeSparkSqlJob(com.google.cloud.dataproc.v1beta2.SparkSqlJob va jobTypeCase_ = 7; return this; } - /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */ + /** + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public Builder clearSparkSqlJob() { if (sparkSqlJobBuilder_ == null) { if (jobTypeCase_ == 7) { @@ -2783,11 +3455,31 @@ public Builder clearSparkSqlJob() { } return this; } - /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */ + /** + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * + */ public com.google.cloud.dataproc.v1beta2.SparkSqlJob.Builder getSparkSqlJobBuilder() { return getSparkSqlJobFieldBuilder().getBuilder(); } - /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */ + /** + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * + */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() { if ((jobTypeCase_ == 7) && (sparkSqlJobBuilder_ != null)) { @@ -2799,7 +3491,17 @@ public com.google.cloud.dataproc.v1beta2.SparkSqlJobOrBuilder getSparkSqlJobOrBu return com.google.cloud.dataproc.v1beta2.SparkSqlJob.getDefaultInstance(); } } - /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */ + /** + * + * + *
+     * Optional. Job is a SparkSql job.
+     *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * + */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataproc.v1beta2.SparkSqlJob, com.google.cloud.dataproc.v1beta2.SparkSqlJob.Builder, @@ -2834,10 +3536,12 @@ public com.google.cloud.dataproc.v1beta2.SparkSqlJobOrBuilder getSparkSqlJobOrBu * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the prestoJob field is set. */ @@ -2849,10 +3553,12 @@ public boolean hasPrestoJob() { * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The prestoJob. */ @@ -2874,10 +3580,12 @@ public com.google.cloud.dataproc.v1beta2.PrestoJob getPrestoJob() { * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ public Builder setPrestoJob(com.google.cloud.dataproc.v1beta2.PrestoJob value) { if (prestoJobBuilder_ == null) { @@ -2896,10 +3604,12 @@ public Builder setPrestoJob(com.google.cloud.dataproc.v1beta2.PrestoJob value) { * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ public Builder setPrestoJob( com.google.cloud.dataproc.v1beta2.PrestoJob.Builder builderForValue) { @@ -2916,10 +3626,12 @@ public Builder setPrestoJob( * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ public Builder mergePrestoJob(com.google.cloud.dataproc.v1beta2.PrestoJob value) { if (prestoJobBuilder_ == null) { @@ -2947,10 +3659,12 @@ public Builder mergePrestoJob(com.google.cloud.dataproc.v1beta2.PrestoJob value) * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ public Builder clearPrestoJob() { if (prestoJobBuilder_ == null) { @@ -2972,10 +3686,12 @@ public Builder clearPrestoJob() { * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ public com.google.cloud.dataproc.v1beta2.PrestoJob.Builder getPrestoJobBuilder() { return getPrestoJobFieldBuilder().getBuilder(); @@ -2984,10 +3700,12 @@ public com.google.cloud.dataproc.v1beta2.PrestoJob.Builder getPrestoJobBuilder() * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ @java.lang.Override public com.google.cloud.dataproc.v1beta2.PrestoJobOrBuilder getPrestoJobOrBuilder() { @@ -3004,10 +3722,12 @@ public com.google.cloud.dataproc.v1beta2.PrestoJobOrBuilder getPrestoJobOrBuilde * * *
-     * Presto job
+     * Optional. Job is a Presto job.
      *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataproc.v1beta2.PrestoJob, diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/OrderedJobOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/OrderedJobOrBuilder.java index 053134b8..12c88a63 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/OrderedJobOrBuilder.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/OrderedJobOrBuilder.java @@ -31,8 +31,8 @@ public interface OrderedJobOrBuilder * within the template. * The step id is used as prefix for job id, as job * `goog-dataproc-workflow-step-id` label, and in - * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - * steps. + * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + * field from other steps. * The id must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). Cannot begin or end with underscore * or hyphen. Must consist of between 3 and 50 characters. @@ -51,8 +51,8 @@ public interface OrderedJobOrBuilder * within the template. * The step id is used as prefix for job id, as job * `goog-dataproc-workflow-step-id` label, and in - * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - * steps. + * [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + * field from other steps. * The id must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), and hyphens (-). Cannot begin or end with underscore * or hyphen. Must consist of between 3 and 50 characters. @@ -65,88 +65,220 @@ public interface OrderedJobOrBuilder com.google.protobuf.ByteString getStepIdBytes(); /** - * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; + * + * + *
+   * Optional. Job is a Hadoop job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the hadoopJob field is set. */ boolean hasHadoopJob(); /** - * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; + * + * + *
+   * Optional. Job is a Hadoop job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The hadoopJob. */ com.google.cloud.dataproc.v1beta2.HadoopJob getHadoopJob(); - /** .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2; */ + /** + * + * + *
+   * Optional. Job is a Hadoop job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL]; + * + */ com.google.cloud.dataproc.v1beta2.HadoopJobOrBuilder getHadoopJobOrBuilder(); /** - * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; + * + * + *
+   * Optional. Job is a Spark job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the sparkJob field is set. */ boolean hasSparkJob(); /** - * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; + * + * + *
+   * Optional. Job is a Spark job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The sparkJob. */ com.google.cloud.dataproc.v1beta2.SparkJob getSparkJob(); - /** .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3; */ + /** + * + * + *
+   * Optional. Job is a Spark job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL]; + * + */ com.google.cloud.dataproc.v1beta2.SparkJobOrBuilder getSparkJobOrBuilder(); /** - * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; + * + * + *
+   * Optional. Job is a PySpark job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the pysparkJob field is set. */ boolean hasPysparkJob(); /** - * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; + * + * + *
+   * Optional. Job is a PySpark job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The pysparkJob. */ com.google.cloud.dataproc.v1beta2.PySparkJob getPysparkJob(); - /** .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4; */ + /** + * + * + *
+   * Optional. Job is a PySpark job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL]; + * + */ com.google.cloud.dataproc.v1beta2.PySparkJobOrBuilder getPysparkJobOrBuilder(); /** - * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; + * + * + *
+   * Optional. Job is a Hive job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the hiveJob field is set. */ boolean hasHiveJob(); /** - * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; + * + * + *
+   * Optional. Job is a Hive job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The hiveJob. */ com.google.cloud.dataproc.v1beta2.HiveJob getHiveJob(); - /** .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5; */ + /** + * + * + *
+   * Optional. Job is a Hive job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL]; + * + */ com.google.cloud.dataproc.v1beta2.HiveJobOrBuilder getHiveJobOrBuilder(); /** - * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; + * + * + *
+   * Optional. Job is a Pig job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the pigJob field is set. */ boolean hasPigJob(); /** - * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; + * + * + *
+   * Optional. Job is a Pig job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The pigJob. */ com.google.cloud.dataproc.v1beta2.PigJob getPigJob(); - /** .google.cloud.dataproc.v1beta2.PigJob pig_job = 6; */ + /** + * + * + *
+   * Optional. Job is a Pig job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL]; + * + */ com.google.cloud.dataproc.v1beta2.PigJobOrBuilder getPigJobOrBuilder(); /** * * *
-   * Spark R job
+   * Optional. Job is a SparkR job.
    *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the sparkRJob field is set. */ @@ -155,10 +287,12 @@ public interface OrderedJobOrBuilder * * *
-   * Spark R job
+   * Optional. Job is a SparkR job.
    *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The sparkRJob. */ @@ -167,36 +301,66 @@ public interface OrderedJobOrBuilder * * *
-   * Spark R job
+   * Optional. Job is a SparkR job.
    *
* - * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11; + * + * .google.cloud.dataproc.v1beta2.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL]; + * */ com.google.cloud.dataproc.v1beta2.SparkRJobOrBuilder getSparkRJobOrBuilder(); /** - * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; + * + * + *
+   * Optional. Job is a SparkSql job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the sparkSqlJob field is set. */ boolean hasSparkSqlJob(); /** - * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; + * + * + *
+   * Optional. Job is a SparkSql job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The sparkSqlJob. */ com.google.cloud.dataproc.v1beta2.SparkSqlJob getSparkSqlJob(); - /** .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7; */ + /** + * + * + *
+   * Optional. Job is a SparkSql job.
+   *
+ * + * + * .google.cloud.dataproc.v1beta2.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL]; + * + */ com.google.cloud.dataproc.v1beta2.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder(); /** * * *
-   * Presto job
+   * Optional. Job is a Presto job.
    *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return Whether the prestoJob field is set. */ @@ -205,10 +369,12 @@ public interface OrderedJobOrBuilder * * *
-   * Presto job
+   * Optional. Job is a Presto job.
    *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * * * @return The prestoJob. */ @@ -217,10 +383,12 @@ public interface OrderedJobOrBuilder * * *
-   * Presto job
+   * Optional. Job is a Presto job.
    *
* - * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12; + * + * .google.cloud.dataproc.v1beta2.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL]; + * */ com.google.cloud.dataproc.v1beta2.PrestoJobOrBuilder getPrestoJobOrBuilder(); diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/PySparkJob.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/PySparkJob.java index 27a1ad60..51182ae3 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/PySparkJob.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/PySparkJob.java @@ -483,8 +483,8 @@ public com.google.protobuf.ByteString getJarFileUrisBytes(int index) { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Python drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -498,8 +498,8 @@ public com.google.protobuf.ProtocolStringList getFileUrisList() { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Python drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -513,8 +513,8 @@ public int getFileUrisCount() { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Python drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -529,8 +529,8 @@ public java.lang.String getFileUris(int index) { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Python drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -548,7 +548,8 @@ public com.google.protobuf.ByteString getFileUrisBytes(int index) { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    *
* @@ -563,7 +564,8 @@ public com.google.protobuf.ProtocolStringList getArchiveUrisList() { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    *
* @@ -578,7 +580,8 @@ public int getArchiveUrisCount() { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    *
* @@ -594,7 +597,8 @@ public java.lang.String getArchiveUris(int index) { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    *
* @@ -1996,8 +2000,8 @@ private void ensureFileUrisIsMutable() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2011,8 +2015,8 @@ public com.google.protobuf.ProtocolStringList getFileUrisList() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2026,8 +2030,8 @@ public int getFileUrisCount() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2042,8 +2046,8 @@ public java.lang.String getFileUris(int index) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2058,8 +2062,8 @@ public com.google.protobuf.ByteString getFileUrisBytes(int index) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2081,8 +2085,8 @@ public Builder setFileUris(int index, java.lang.String value) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2103,8 +2107,8 @@ public Builder addFileUris(java.lang.String value) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2122,8 +2126,8 @@ public Builder addAllFileUris(java.lang.Iterable values) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2140,8 +2144,8 @@ public Builder clearFileUris() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Python drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      *
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2173,7 +2177,8 @@ private void ensureArchiveUrisIsMutable() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      *
* @@ -2188,7 +2193,8 @@ public com.google.protobuf.ProtocolStringList getArchiveUrisList() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      *
* @@ -2203,7 +2209,8 @@ public int getArchiveUrisCount() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      *
* @@ -2219,7 +2226,8 @@ public java.lang.String getArchiveUris(int index) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      *
* @@ -2235,7 +2243,8 @@ public com.google.protobuf.ByteString getArchiveUrisBytes(int index) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2258,7 +2267,8 @@ public Builder setArchiveUris(int index, java.lang.String value) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2280,7 +2290,8 @@ public Builder addArchiveUris(java.lang.String value) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2299,7 +2310,8 @@ public Builder addAllArchiveUris(java.lang.Iterable values) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2317,7 +2329,8 @@ public Builder clearArchiveUris() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
      *
      *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/PySparkJobOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/PySparkJobOrBuilder.java
index 1f52a82e..61921ac5 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/PySparkJobOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/PySparkJobOrBuilder.java
@@ -223,8 +223,8 @@ public interface PySparkJobOrBuilder
   *
   *
   *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Python drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -236,8 +236,8 @@ public interface PySparkJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Python drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -249,8 +249,8 @@ public interface PySparkJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Python drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -263,8 +263,8 @@ public interface PySparkJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Python drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -278,7 +278,8 @@ public interface PySparkJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -291,7 +292,8 @@ public interface PySparkJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -304,7 +306,8 @@ public interface PySparkJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -318,7 +321,8 @@ public interface PySparkJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
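
The reworded comments in the `PySparkJob` and `PySparkJobOrBuilder` hunks above pin down the runtime semantics of the two repeated fields: `file_uris` entries land in each executor's working directory as-is, while `archive_uris` entries are unpacked there first. A minimal sketch of populating both through the generated builder, assuming the standard protobuf builder surface; the bucket and object names are placeholders:

    import com.google.cloud.dataproc.v1beta2.PySparkJob;

    public class PySparkJobExample {
      public static void main(String[] args) {
        PySparkJob job =
            PySparkJob.newBuilder()
                .setMainPythonFileUri("gs://my-bucket/jobs/wordcount.py")
                // Placed in each executor's working directory unmodified.
                .addFileUris("gs://my-bucket/data/stopwords.txt")
                // Extracted into each executor's working directory before the
                // job starts; .jar, .tar, .tar.gz, .tgz, and .zip are supported.
                .addArchiveUris("gs://my-bucket/deps/venv.tar.gz")
                .build();

        System.out.println(job.getFileUrisCount());  // 1
        System.out.println(job.getArchiveUris(0));   // gs://my-bucket/deps/venv.tar.gz
      }
    }
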
* diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegionName.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegionName.java index 8d814573..3cd8a0cf 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegionName.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/RegionName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,19 +23,29 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class RegionName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_REGION = PathTemplate.createWithoutUrlEncoding("projects/{project}/regions/{region}"); - private volatile Map fieldValuesMap; - private final String project; private final String region; + @Deprecated + protected RegionName() { + project = null; + region = null; + } + + private RegionName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + region = Preconditions.checkNotNull(builder.getRegion()); + } + public String getProject() { return project; } @@ -52,11 +62,6 @@ public Builder toBuilder() { return new Builder(this); } - private RegionName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - region = Preconditions.checkNotNull(builder.getRegion()); - } - public static RegionName of(String project, String region) { return newBuilder().setProject(project).setRegion(region).build(); } @@ -70,7 +75,7 @@ public static RegionName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_REGION.validatedMatch( formattedString, "RegionName.parse: formattedString not in valid format"); return of(matchMap.get("project"), matchMap.get("region")); } @@ -84,7 +89,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (RegionName value : values) { if (value == null) { list.add(""); @@ -96,16 +101,21 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_REGION.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("region", region); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (region != null) { + fieldMapBuilder.put("region", region); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -119,15 +129,38 @@ public String getFieldValue(String fieldName) { @Override public String 
toString() { - return PATH_TEMPLATE.instantiate("project", project, "region", region); + return PROJECT_REGION.instantiate("project", project, "region", region); } - /** Builder for RegionName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + RegionName that = ((RegionName) o); + return Objects.equals(this.project, that.project) && Objects.equals(this.region, that.region); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(region); + return h; + } + + /** Builder for projects/{project}/regions/{region}. */ + public static class Builder { private String project; private String region; + protected Builder() {} + public String getProject() { return project; } @@ -146,8 +179,6 @@ public Builder setRegion(String region) { return this; } - private Builder() {} - private Builder(RegionName regionName) { project = regionName.project; region = regionName.region; @@ -157,26 +188,4 @@ public RegionName build() { return new RegionName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof RegionName) { - RegionName that = (RegionName) o; - return (this.project.equals(that.project)) && (this.region.equals(that.region)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= region.hashCode(); - return h; - } } diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SharedProto.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SharedProto.java index 58e8dbe5..af1211ff 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SharedProto.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SharedProto.java @@ -37,15 +37,15 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { java.lang.String[] descriptorData = { "\n*google/cloud/dataproc/v1beta2/shared.p" + "roto\022\035google.cloud.dataproc.v1beta2\032\034goo" - + "gle/api/annotations.proto*\302\001\n\tComponent\022" + + "gle/api/annotations.proto*\266\001\n\tComponent\022" + "\031\n\025COMPONENT_UNSPECIFIED\020\000\022\014\n\010ANACONDA\020\005" - + "\022\n\n\006DOCKER\020\r\022\t\n\005DRUID\020\t\022\t\n\005FLINK\020\016\022\020\n\014HI" - + "VE_WEBHCAT\020\003\022\013\n\007JUPYTER\020\001\022\014\n\010KERBEROS\020\007\022" - + "\n\n\006PRESTO\020\006\022\n\n\006RANGER\020\014\022\010\n\004SOLR\020\n\022\014\n\010ZEP" - + "PELIN\020\004\022\r\n\tZOOKEEPER\020\010By\n!com.google.clo" - + "ud.dataproc.v1beta2B\013SharedProtoP\001ZEgoog" - + "le.golang.org/genproto/googleapis/cloud/" - + "dataproc/v1beta2;dataprocb\006proto3" + + "\022\t\n\005DRUID\020\t\022\t\n\005HBASE\020\013\022\020\n\014HIVE_WEBHCAT\020\003" + + "\022\013\n\007JUPYTER\020\001\022\014\n\010KERBEROS\020\007\022\n\n\006PRESTO\020\006\022" + + "\n\n\006RANGER\020\014\022\010\n\004SOLR\020\n\022\014\n\010ZEPPELIN\020\004\022\r\n\tZ" + + "OOKEEPER\020\010By\n!com.google.cloud.dataproc." 
+ + "v1beta2B\013SharedProtoP\001ZEgoogle.golang.or" + + "g/genproto/googleapis/cloud/dataproc/v1b" + + "eta2;dataprocb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SoftwareConfig.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SoftwareConfig.java index 5eaad162..78d40dd8 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SoftwareConfig.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SoftwareConfig.java @@ -175,7 +175,7 @@ protected com.google.protobuf.MapField internalGetMapField(int number) { *
    * Optional. The version of software inside the cluster. It must be one of the
    * supported [Dataproc
-   * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+   * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
    * such as "1.2" (including a subminor version, such as "1.2.29"), or the
    * ["preview"
    * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
@@ -204,7 +204,7 @@ public java.lang.String getImageVersion() {
    * 
    * Optional. The version of software inside the cluster. It must be one of the
    * supported [Dataproc
-   * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+   * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
    * such as "1.2" (including a subminor version, such as "1.2.29"), or the
    * ["preview"
    * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
@@ -896,7 +896,7 @@ public Builder mergeFrom(
      * 
      * Optional. The version of software inside the cluster. It must be one of the
      * supported [Dataproc
-     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
      * such as "1.2" (including a subminor version, such as "1.2.29"), or the
      * ["preview"
      * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
@@ -924,7 +924,7 @@ public java.lang.String getImageVersion() {
      * 
      * Optional. The version of software inside the cluster. It must be one of the
      * supported [Dataproc
-     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
      * such as "1.2" (including a subminor version, such as "1.2.29"), or the
      * ["preview"
      * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
@@ -952,7 +952,7 @@ public com.google.protobuf.ByteString getImageVersionBytes() {
      * 
      * Optional. The version of software inside the cluster. It must be one of the
      * supported [Dataproc
-     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
      * such as "1.2" (including a subminor version, such as "1.2.29"), or the
      * ["preview"
      * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
@@ -979,7 +979,7 @@ public Builder setImageVersion(java.lang.String value) {
      * 
      * Optional. The version of software inside the cluster. It must be one of the
      * supported [Dataproc
-     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
      * such as "1.2" (including a subminor version, such as "1.2.29"), or the
      * ["preview"
      * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
@@ -1002,7 +1002,7 @@ public Builder clearImageVersion() {
      * 
      * Optional. The version of software inside the cluster. It must be one of the
      * supported [Dataproc
-     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+     * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
      * such as "1.2" (including a subminor version, such as "1.2.29"), or the
      * ["preview"
      * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SoftwareConfigOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SoftwareConfigOrBuilder.java
index 6b2f2fef..29b851ca 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SoftwareConfigOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SoftwareConfigOrBuilder.java
@@ -29,7 +29,7 @@ public interface SoftwareConfigOrBuilder
    * 
    * Optional. The version of software inside the cluster. It must be one of the
    * supported [Dataproc
-   * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+   * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
    * such as "1.2" (including a subminor version, such as "1.2.29"), or the
    * ["preview"
    * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
@@ -47,7 +47,7 @@ public interface SoftwareConfigOrBuilder
    * 
    * Optional. The version of software inside the cluster. It must be one of the
    * supported [Dataproc
-   * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+   * Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
    * such as "1.2" (including a subminor version, such as "1.2.29"), or the
    * ["preview"
    * version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
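
The hunks above regenerate several classes at once: `RegionName` moves to the builder-first gapic-generator-java shape, the `SharedProto` descriptor swaps `DOCKER`/`FLINK` for `HBASE` in the `Component` enum, and `SoftwareConfig` keeps `image_version` with the corrected doc anchor. A short sketch of exercising the regenerated pieces together; the project id, region, and version strings are illustrative only:

    import com.google.cloud.dataproc.v1beta2.Component;
    import com.google.cloud.dataproc.v1beta2.RegionName;
    import com.google.cloud.dataproc.v1beta2.SoftwareConfig;

    public class RegeneratedTypesExample {
      public static void main(String[] args) {
        // of() formats a resource name; parse() validates and splits it back.
        RegionName region = RegionName.of("my-project", "us-central1");
        RegionName parsed = RegionName.parse(region.toString());
        System.out.println(parsed.equals(region));  // true, compared by value

        // HBASE is the newly added optional component; DOCKER and FLINK are
        // no longer valid values of this enum after this change.
        SoftwareConfig software =
            SoftwareConfig.newBuilder()
                .setImageVersion("1.5")  // or a subminor version such as "1.5.29"
                .addOptionalComponents(Component.HBASE)
                .build();
        System.out.println(software.getImageVersion());
      }
    }
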
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkJob.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkJob.java
index 8dec085f..98672cc5 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkJob.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkJob.java
@@ -523,8 +523,8 @@ public com.google.protobuf.ByteString getJarFileUrisBytes(int index) {
    *
    *
    * 
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -538,8 +538,8 @@ public com.google.protobuf.ProtocolStringList getFileUrisList() { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -553,8 +553,8 @@ public int getFileUrisCount() { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -569,8 +569,8 @@ public java.lang.String getFileUris(int index) { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -588,8 +588,8 @@ public com.google.protobuf.ByteString getFileUrisBytes(int index) { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory
-   * of Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -604,8 +604,8 @@ public com.google.protobuf.ProtocolStringList getArchiveUrisList() { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory
-   * of Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -620,8 +620,8 @@ public int getArchiveUrisCount() { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory
-   * of Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -637,8 +637,8 @@ public java.lang.String getArchiveUris(int index) { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory
-   * of Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -2027,8 +2027,8 @@ private void ensureFileUrisIsMutable() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2042,8 +2042,8 @@ public com.google.protobuf.ProtocolStringList getFileUrisList() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2057,8 +2057,8 @@ public int getFileUrisCount() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2073,8 +2073,8 @@ public java.lang.String getFileUris(int index) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2089,8 +2089,8 @@ public com.google.protobuf.ByteString getFileUrisBytes(int index) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2112,8 +2112,8 @@ public Builder setFileUris(int index, java.lang.String value) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2134,8 +2134,8 @@ public Builder addFileUris(java.lang.String value) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2153,8 +2153,8 @@ public Builder addAllFileUris(java.lang.Iterable values) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2171,8 +2171,8 @@ public Builder clearFileUris() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -2204,8 +2204,8 @@ private void ensureArchiveUrisIsMutable() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2220,8 +2220,8 @@ public com.google.protobuf.ProtocolStringList getArchiveUrisList() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2236,8 +2236,8 @@ public int getArchiveUrisCount() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2253,8 +2253,8 @@ public java.lang.String getArchiveUris(int index) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2270,8 +2270,8 @@ public com.google.protobuf.ByteString getArchiveUrisBytes(int index) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2294,8 +2294,8 @@ public Builder setArchiveUris(int index, java.lang.String value) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2317,8 +2317,8 @@ public Builder addArchiveUris(java.lang.String value) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2337,8 +2337,8 @@ public Builder addAllArchiveUris(java.lang.Iterable values) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -2356,8 +2356,8 @@ public Builder clearArchiveUris() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory
-     * of Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
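
`SparkJob` carries the same `file_uris`/`archive_uris` pair, and the hunks above touch the full set of repeated-field mutators (`setFileUris`, `addFileUris`, `addAllFileUris`, `clearFileUris`). A sketch of the write side, with placeholder URIs and a hypothetical driver class:

    import com.google.cloud.dataproc.v1beta2.SparkJob;
    import java.util.Arrays;

    public class SparkJobExample {
      public static void main(String[] args) {
        SparkJob job =
            SparkJob.newBuilder()
                .setMainClass("com.example.WordCount")
                .addJarFileUris("gs://my-bucket/jars/wordcount.jar")
                // addAllFileUris accepts any Iterable<String>.
                .addAllFileUris(Arrays.asList(
                    "gs://my-bucket/data/part-0.txt",
                    "gs://my-bucket/data/part-1.txt"))
                .addArchiveUris("gs://my-bucket/deps/native-libs.tar.gz")
                .build();

        System.out.println(job.getFileUrisList());
      }
    }
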
      *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkJobOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkJobOrBuilder.java
index 9a524b6e..198adb65 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkJobOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkJobOrBuilder.java
@@ -193,8 +193,8 @@ public interface SparkJobOrBuilder
   *
   *
   *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -206,8 +206,8 @@ public interface SparkJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -219,8 +219,8 @@ public interface SparkJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -233,8 +233,8 @@ public interface SparkJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * Spark drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL]; @@ -248,8 +248,8 @@ public interface SparkJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory
-   * of Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -262,8 +262,8 @@ public interface SparkJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory
-   * of Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -276,8 +276,8 @@ public interface SparkJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory
-   * of Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -291,8 +291,8 @@ public interface SparkJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory
-   * of Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
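
The `SparkJobOrBuilder` interface updated above is the read-only view implemented by both the message and its builder, which is why the doc fix repeats across the list, count, indexed, and bytes accessor flavors. A small sketch of consuming it generically, under the same placeholder-URI assumption:

    import com.google.cloud.dataproc.v1beta2.SparkJob;
    import com.google.cloud.dataproc.v1beta2.SparkJobOrBuilder;

    public class SparkJobReader {
      // Accepts either a built SparkJob or a SparkJob.Builder.
      static void printUris(SparkJobOrBuilder job) {
        for (int i = 0; i < job.getFileUrisCount(); i++) {
          System.out.println("file:    " + job.getFileUris(i));
        }
        for (String archive : job.getArchiveUrisList()) {
          System.out.println("archive: " + archive);
        }
      }

      public static void main(String[] args) {
        SparkJob.Builder builder =
            SparkJob.newBuilder().addFileUris("gs://my-bucket/data/input.txt");
        printUris(builder);          // the builder implements SparkJobOrBuilder
        printUris(builder.build());  // and so does the immutable message
      }
    }
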
   *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkRJob.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkRJob.java
index f56d0518..197e281b 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkRJob.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkRJob.java
@@ -324,8 +324,8 @@ public com.google.protobuf.ByteString getArgsBytes(int index) {
   *
   *
   *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * R drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -339,8 +339,8 @@ public com.google.protobuf.ProtocolStringList getFileUrisList() { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * R drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -354,8 +354,8 @@ public int getFileUrisCount() { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * R drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -370,8 +370,8 @@ public java.lang.String getFileUris(int index) { * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * R drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -389,8 +389,8 @@ public com.google.protobuf.ByteString getFileUrisBytes(int index) { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
-   * Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -405,8 +405,8 @@ public com.google.protobuf.ProtocolStringList getArchiveUrisList() { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
-   * Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -421,8 +421,8 @@ public int getArchiveUrisCount() { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
-   * Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -438,8 +438,8 @@ public java.lang.String getArchiveUris(int index) { * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
-   * Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -1420,8 +1420,8 @@ private void ensureFileUrisIsMutable() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1435,8 +1435,8 @@ public com.google.protobuf.ProtocolStringList getFileUrisList() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1450,8 +1450,8 @@ public int getFileUrisCount() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1466,8 +1466,8 @@ public java.lang.String getFileUris(int index) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1482,8 +1482,8 @@ public com.google.protobuf.ByteString getFileUrisBytes(int index) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1505,8 +1505,8 @@ public Builder setFileUris(int index, java.lang.String value) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1527,8 +1527,8 @@ public Builder addFileUris(java.lang.String value) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1546,8 +1546,8 @@ public Builder addAllFileUris(java.lang.Iterable values) { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1564,8 +1564,8 @@ public Builder clearFileUris() { * * *
-     * Optional. HCFS URIs of files to be copied to the working directory of
-     * R drivers and distributed tasks. Useful for naively parallel tasks.
+     * Optional. HCFS URIs of files to be placed in the working directory of
+     * each executor. Useful for naively parallel tasks.
      * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -1597,8 +1597,8 @@ private void ensureArchiveUrisIsMutable() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -1613,8 +1613,8 @@ public com.google.protobuf.ProtocolStringList getArchiveUrisList() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -1629,8 +1629,8 @@ public int getArchiveUrisCount() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -1646,8 +1646,8 @@ public java.lang.String getArchiveUris(int index) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -1663,8 +1663,8 @@ public com.google.protobuf.ByteString getArchiveUrisBytes(int index) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -1687,8 +1687,8 @@ public Builder setArchiveUris(int index, java.lang.String value) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -1710,8 +1710,8 @@ public Builder addArchiveUris(java.lang.String value) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -1730,8 +1730,8 @@ public Builder addAllArchiveUris(java.lang.Iterable values) { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
* @@ -1749,8 +1749,8 @@ public Builder clearArchiveUris() { * * *
-     * Optional. HCFS URIs of archives to be extracted in the working directory of
-     * Spark drivers and tasks. Supported file types:
+     * Optional. HCFS URIs of archives to be extracted into the working directory
+     * of each executor. Supported file types:
      * .jar, .tar, .tar.gz, .tgz, and .zip.
      * 
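
`SparkRJob` gets the identical wording change; notably, its old `archive_uris` comment still said "Spark drivers and tasks" even though the job runs R code, which the executor-based phrasing now sidesteps. A minimal sketch of the builder, with placeholder URIs:

    import com.google.cloud.dataproc.v1beta2.SparkRJob;

    public class SparkRJobExample {
      public static void main(String[] args) {
        SparkRJob job =
            SparkRJob.newBuilder()
                .setMainRFileUri("gs://my-bucket/jobs/analysis.R")
                .addArgs("--iterations=10")
                .addFileUris("gs://my-bucket/data/observations.csv")
                .addArchiveUris("gs://my-bucket/deps/r-packages.zip")
                .build();

        System.out.println(job.getMainRFileUri());
      }
    }
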
      *
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkRJobOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkRJobOrBuilder.java
index 03957053..574e79f3 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkRJobOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/SparkRJobOrBuilder.java
@@ -113,8 +113,8 @@ public interface SparkRJobOrBuilder
   *
   *
   *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * R drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -126,8 +126,8 @@ public interface SparkRJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * R drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -139,8 +139,8 @@ public interface SparkRJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * R drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -153,8 +153,8 @@ public interface SparkRJobOrBuilder * * *
-   * Optional. HCFS URIs of files to be copied to the working directory of
-   * R drivers and distributed tasks. Useful for naively parallel tasks.
+   * Optional. HCFS URIs of files to be placed in the working directory of
+   * each executor. Useful for naively parallel tasks.
    * 
* * repeated string file_uris = 3 [(.google.api.field_behavior) = OPTIONAL]; @@ -168,8 +168,8 @@ public interface SparkRJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
-   * Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -182,8 +182,8 @@ public interface SparkRJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
-   * Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -196,8 +196,8 @@ public interface SparkRJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
-   * Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* @@ -211,8 +211,8 @@ public interface SparkRJobOrBuilder * * *
-   * Optional. HCFS URIs of archives to be extracted in the working directory of
-   * Spark drivers and tasks. Supported file types:
+   * Optional. HCFS URIs of archives to be extracted into the working directory
+   * of each executor. Supported file types:
    * .jar, .tar, .tar.gz, .tgz, and .zip.
    * 
* diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameter.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameter.java index 8dfffc12..c933d772 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameter.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameter.java @@ -229,10 +229,10 @@ public com.google.protobuf.ByteString getNameBytes() { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -278,10 +278,10 @@ public com.google.protobuf.ProtocolStringList getFieldsList() { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -327,10 +327,10 @@ public int getFieldsCount() { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -377,10 +377,10 @@ public java.lang.String getFields(int index) { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. 
- * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1078,10 +1078,10 @@ private void ensureFieldsIsMutable() { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1127,10 +1127,10 @@ public com.google.protobuf.ProtocolStringList getFieldsList() { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1176,10 +1176,10 @@ public int getFieldsCount() { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1226,10 +1226,10 @@ public java.lang.String getFields(int index) { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. 
- * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1276,10 +1276,10 @@ public com.google.protobuf.ByteString getFieldsBytes(int index) { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1333,10 +1333,10 @@ public Builder setFields(int index, java.lang.String value) { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1389,10 +1389,10 @@ public Builder addFields(java.lang.String value) { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1442,10 +1442,10 @@ public Builder addAllFields(java.lang.Iterable values) { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. 
- * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -1494,10 +1494,10 @@ public Builder clearFields() { * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameterOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameterOrBuilder.java index 93c43366..580307f3 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameterOrBuilder.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/TemplateParameterOrBuilder.java @@ -67,10 +67,10 @@ public interface TemplateParameterOrBuilder * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -114,10 +114,10 @@ public interface TemplateParameterOrBuilder * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. 
For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -161,10 +161,10 @@ public interface TemplateParameterOrBuilder * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] @@ -209,10 +209,10 @@ public interface TemplateParameterOrBuilder * Required. Paths to all fields that the parameter replaces. * A field is allowed to appear in at most one parameter's list of field * paths. - * A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - * For example, a field path that references the zone field of a workflow - * template's cluster selector would be specified as - * `placement.clusterSelector.zone`. + * A field path is similar in syntax to a + * [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + * field path that references the zone field of a workflow template's cluster + * selector would be specified as `placement.clusterSelector.zone`. * Also, field paths can reference fields using the following syntax: * * Values in maps can be referenced by key: * * labels['key'] diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/UpdateClusterRequest.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/UpdateClusterRequest.java index 3a55fc4f..ad919a25 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/UpdateClusterRequest.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/UpdateClusterRequest.java @@ -680,10 +680,11 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { * *
    * Optional. A unique id used to identify the request. If the server
-   * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-   * backend is returned.
+   * receives two
+   * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
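
A request id gives update retries at-most-once semantics: a resend with the
same id is ignored and the first Operation is returned. A minimal sketch of
supplying one (the project, region, and cluster values are hypothetical, and
`cluster`/`updateMask` stand for an already-built Cluster and FieldMask):

    // The docs recommend setting the request id to a UUID.
    UpdateClusterRequest request =
        UpdateClusterRequest.newBuilder()
            .setProjectId("my-project")   // hypothetical
            .setRegion("us-central1")     // hypothetical
            .setClusterName("my-cluster") // hypothetical
            .setCluster(cluster)
            .setUpdateMask(updateMask)
            .setRequestId(java.util.UUID.randomUUID().toString())
            .build();
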
@@ -711,10 +712,11 @@ public java.lang.String getRequestId() {
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-   * backend is returned.
+   * receives two
+   * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -2678,10 +2680,11 @@ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -2708,10 +2711,11 @@ public java.lang.String getRequestId() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -2738,10 +2742,11 @@ public com.google.protobuf.ByteString getRequestIdBytes() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -2767,10 +2772,11 @@ public Builder setRequestId(java.lang.String value) {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -2792,10 +2798,11 @@ public Builder clearRequestId() {
      *
      * 
      * Optional. A unique id used to identify the request. If the server
-     * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-     * id, then the second request will be ignored and the
-     * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-     * backend is returned.
+     * receives two
+     * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+     * requests with the same id, then the second request will be ignored and the
+     * first [google.longrunning.Operation][google.longrunning.Operation] created
+     * and stored in the backend is returned.
      * It is recommended to always set this value to a
      * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
      * The id must contain only letters (a-z, A-Z), numbers (0-9),
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/UpdateClusterRequestOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/UpdateClusterRequestOrBuilder.java
index ff07b68c..b3df24ce 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/UpdateClusterRequestOrBuilder.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/UpdateClusterRequestOrBuilder.java
@@ -411,10 +411,11 @@ public interface UpdateClusterRequestOrBuilder
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-   * backend is returned.
+   * receives two
+   * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
@@ -431,10 +432,11 @@ public interface UpdateClusterRequestOrBuilder
    *
    * 
    * Optional. A unique id used to identify the request. If the server
-   * receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests  with the same
-   * id, then the second request will be ignored and the
-   * first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
-   * backend is returned.
+   * receives two
+   * [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest]
+   * requests with the same id, then the second request will be ignored and the
+   * first [google.longrunning.Operation][google.longrunning.Operation] created
+   * and stored in the backend is returned.
    * It is recommended to always set this value to a
    * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
    * The id must contain only letters (a-z, A-Z), numbers (0-9),
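
The metadata diff below also surfaces DAG timing. A minimal sketch of reading
the new fields (assuming `operation` is the google.longrunning.Operation of an
instantiated workflow, the checked unpack exception is handled by the caller,
and Durations/Timestamps come from protobuf-java-util):

    WorkflowMetadata metadata =
        operation.getMetadata().unpack(WorkflowMetadata.class);

    if (metadata.hasDagTimeout()) {
      // Rendered like "1800s" for a 30-minute timeout.
      System.out.println("DAG timeout: "
          + com.google.protobuf.util.Durations.toString(metadata.getDagTimeout()));
    }
    // dag_start_time and dag_end_time are set only for workflows that have a
    // dag_timeout, once the DAG begins and ends respectively.
    if (metadata.hasDagStartTime()) {
      System.out.println("DAG started: "
          + com.google.protobuf.util.Timestamps.toString(metadata.getDagStartTime()));
    }
    if (metadata.hasDagEndTime()) {
      System.out.println("DAG ended: "
          + com.google.protobuf.util.Timestamps.toString(metadata.getDagEndTime()));
    }
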
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadata.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadata.java
index 201bff3c..0505db7d 100644
--- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadata.java
+++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadata.java
@@ -200,6 +200,51 @@ private WorkflowMetadata(
               java.lang.String s = input.readStringRequireUtf8();
 
               clusterUuid_ = s;
+              break;
+            }
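+          // Each tag is (field number << 3) | wire type 2 (length-delimited):
+          // field 12 -> tag 98, field 13 -> tag 106, field 14 -> tag 114.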
+          case 98:
+            {
+              com.google.protobuf.Duration.Builder subBuilder = null;
+              if (dagTimeout_ != null) {
+                subBuilder = dagTimeout_.toBuilder();
+              }
+              dagTimeout_ =
+                  input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(dagTimeout_);
+                dagTimeout_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+          case 106:
+            {
+              com.google.protobuf.Timestamp.Builder subBuilder = null;
+              if (dagStartTime_ != null) {
+                subBuilder = dagStartTime_.toBuilder();
+              }
+              dagStartTime_ =
+                  input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(dagStartTime_);
+                dagStartTime_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+          case 114:
+            {
+              com.google.protobuf.Timestamp.Builder subBuilder = null;
+              if (dagEndTime_ != null) {
+                subBuilder = dagEndTime_.toBuilder();
+              }
+              dagEndTime_ =
+                  input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(dagEndTime_);
+                dagEndTime_ = subBuilder.buildPartial();
+              }
+
               break;
             }
           default:
@@ -1009,6 +1054,185 @@ public com.google.protobuf.ByteString getClusterUuidBytes() {
     }
   }
 
+  public static final int DAG_TIMEOUT_FIELD_NUMBER = 12;
+  private com.google.protobuf.Duration dagTimeout_;
+  /**
+   *
+   *
+   * 
+   * Output only. The timeout duration for the DAG of jobs.
+   * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+   * as a
+   * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+   * For example, "1800" represents 1800 seconds (a 30-minute duration).
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagTimeout field is set. + */ + @java.lang.Override + public boolean hasDagTimeout() { + return dagTimeout_ != null; + } + /** + * + * + *
+   * Output only. The timeout duration for the DAG of jobs.
+   * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+   * as a
+   * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+   * For example, "1800" represents 1800 seconds (a 30-minute duration).
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagTimeout. + */ + @java.lang.Override + public com.google.protobuf.Duration getDagTimeout() { + return dagTimeout_ == null ? com.google.protobuf.Duration.getDefaultInstance() : dagTimeout_; + } + /** + * + * + *
+   * Output only. The timeout duration for the DAG of jobs.
+   * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+   * as a
+   * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+   * For example, "1800" represents 1800 seconds (a 30-minute duration).
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.DurationOrBuilder getDagTimeoutOrBuilder() { + return getDagTimeout(); + } + + public static final int DAG_START_TIME_FIELD_NUMBER = 13; + private com.google.protobuf.Timestamp dagStartTime_; + /** + * + * + *
+   * Output only. DAG start time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG begins.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagStartTime field is set. + */ + @java.lang.Override + public boolean hasDagStartTime() { + return dagStartTime_ != null; + } + /** + * + * + *
+   * Output only. DAG start time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG begins.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagStartTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getDagStartTime() { + return dagStartTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : dagStartTime_; + } + /** + * + * + *
+   * Output only. DAG start time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG begins.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getDagStartTimeOrBuilder() { + return getDagStartTime(); + } + + public static final int DAG_END_TIME_FIELD_NUMBER = 14; + private com.google.protobuf.Timestamp dagEndTime_; + /** + * + * + *
+   * Output only. DAG end time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG ends.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagEndTime field is set. + */ + @java.lang.Override + public boolean hasDagEndTime() { + return dagEndTime_ != null; + } + /** + * + * + *
+   * Output only. DAG end time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG ends.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagEndTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getDagEndTime() { + return dagEndTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : dagEndTime_; + } + /** + * + * + *
+   * Output only. DAG end time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG ends.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getDagEndTimeOrBuilder() { + return getDagEndTime(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -1055,6 +1279,15 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (!getClusterUuidBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 11, clusterUuid_); } + if (dagTimeout_ != null) { + output.writeMessage(12, getDagTimeout()); + } + if (dagStartTime_ != null) { + output.writeMessage(13, getDagStartTime()); + } + if (dagEndTime_ != null) { + output.writeMessage(14, getDagEndTime()); + } unknownFields.writeTo(output); } @@ -1104,6 +1337,15 @@ public int getSerializedSize() { if (!getClusterUuidBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, clusterUuid_); } + if (dagTimeout_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(12, getDagTimeout()); + } + if (dagStartTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(13, getDagStartTime()); + } + if (dagEndTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(14, getDagEndTime()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -1146,6 +1388,18 @@ public boolean equals(final java.lang.Object obj) { if (!getEndTime().equals(other.getEndTime())) return false; } if (!getClusterUuid().equals(other.getClusterUuid())) return false; + if (hasDagTimeout() != other.hasDagTimeout()) return false; + if (hasDagTimeout()) { + if (!getDagTimeout().equals(other.getDagTimeout())) return false; + } + if (hasDagStartTime() != other.hasDagStartTime()) return false; + if (hasDagStartTime()) { + if (!getDagStartTime().equals(other.getDagStartTime())) return false; + } + if (hasDagEndTime() != other.hasDagEndTime()) return false; + if (hasDagEndTime()) { + if (!getDagEndTime().equals(other.getDagEndTime())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1191,6 +1445,18 @@ public int hashCode() { } hash = (37 * hash) + CLUSTER_UUID_FIELD_NUMBER; hash = (53 * hash) + getClusterUuid().hashCode(); + if (hasDagTimeout()) { + hash = (37 * hash) + DAG_TIMEOUT_FIELD_NUMBER; + hash = (53 * hash) + getDagTimeout().hashCode(); + } + if (hasDagStartTime()) { + hash = (37 * hash) + DAG_START_TIME_FIELD_NUMBER; + hash = (53 * hash) + getDagStartTime().hashCode(); + } + if (hasDagEndTime()) { + hash = (37 * hash) + DAG_END_TIME_FIELD_NUMBER; + hash = (53 * hash) + getDagEndTime().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1397,6 +1663,24 @@ public Builder clear() { } clusterUuid_ = ""; + if (dagTimeoutBuilder_ == null) { + dagTimeout_ = null; + } else { + dagTimeout_ = null; + dagTimeoutBuilder_ = null; + } + if (dagStartTimeBuilder_ == null) { + dagStartTime_ = null; + } else { + dagStartTime_ = null; + dagStartTimeBuilder_ = null; + } + if (dagEndTimeBuilder_ == null) { + dagEndTime_ = null; + } else { + dagEndTime_ = null; + dagEndTimeBuilder_ = null; + } return this; } @@ -1457,6 +1741,21 @@ public com.google.cloud.dataproc.v1beta2.WorkflowMetadata buildPartial() { result.endTime_ = endTimeBuilder_.build(); } result.clusterUuid_ = clusterUuid_; + if (dagTimeoutBuilder_ == null) { + result.dagTimeout_ = dagTimeout_; + } else { + 
result.dagTimeout_ = dagTimeoutBuilder_.build(); + } + if (dagStartTimeBuilder_ == null) { + result.dagStartTime_ = dagStartTime_; + } else { + result.dagStartTime_ = dagStartTimeBuilder_.build(); + } + if (dagEndTimeBuilder_ == null) { + result.dagEndTime_ = dagEndTime_; + } else { + result.dagEndTime_ = dagEndTimeBuilder_.build(); + } onBuilt(); return result; } @@ -1541,6 +1840,15 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.WorkflowMetadata othe clusterUuid_ = other.clusterUuid_; onChanged(); } + if (other.hasDagTimeout()) { + mergeDagTimeout(other.getDagTimeout()); + } + if (other.hasDagStartTime()) { + mergeDagStartTime(other.getDagStartTime()); + } + if (other.hasDagEndTime()) { + mergeDagEndTime(other.getDagEndTime()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -3232,6 +3540,683 @@ public Builder setClusterUuidBytes(com.google.protobuf.ByteString value) { return this; } + private com.google.protobuf.Duration dagTimeout_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, + com.google.protobuf.Duration.Builder, + com.google.protobuf.DurationOrBuilder> + dagTimeoutBuilder_; + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagTimeout field is set. + */ + public boolean hasDagTimeout() { + return dagTimeoutBuilder_ != null || dagTimeout_ != null; + } + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagTimeout. + */ + public com.google.protobuf.Duration getDagTimeout() { + if (dagTimeoutBuilder_ == null) { + return dagTimeout_ == null + ? com.google.protobuf.Duration.getDefaultInstance() + : dagTimeout_; + } else { + return dagTimeoutBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDagTimeout(com.google.protobuf.Duration value) { + if (dagTimeoutBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + dagTimeout_ = value; + onChanged(); + } else { + dagTimeoutBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDagTimeout(com.google.protobuf.Duration.Builder builderForValue) { + if (dagTimeoutBuilder_ == null) { + dagTimeout_ = builderForValue.build(); + onChanged(); + } else { + dagTimeoutBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeDagTimeout(com.google.protobuf.Duration value) { + if (dagTimeoutBuilder_ == null) { + if (dagTimeout_ != null) { + dagTimeout_ = + com.google.protobuf.Duration.newBuilder(dagTimeout_).mergeFrom(value).buildPartial(); + } else { + dagTimeout_ = value; + } + onChanged(); + } else { + dagTimeoutBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearDagTimeout() { + if (dagTimeoutBuilder_ == null) { + dagTimeout_ = null; + onChanged(); + } else { + dagTimeout_ = null; + dagTimeoutBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Duration.Builder getDagTimeoutBuilder() { + + onChanged(); + return getDagTimeoutFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.DurationOrBuilder getDagTimeoutOrBuilder() { + if (dagTimeoutBuilder_ != null) { + return dagTimeoutBuilder_.getMessageOrBuilder(); + } else { + return dagTimeout_ == null + ? com.google.protobuf.Duration.getDefaultInstance() + : dagTimeout_; + } + } + /** + * + * + *
+     * Output only. The timeout duration for the DAG of jobs.
+     * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+     * as a
+     * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+     * For example, "1800" represents 1800 seconds (a 30-minute duration).
+     * 
+ * + * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, + com.google.protobuf.Duration.Builder, + com.google.protobuf.DurationOrBuilder> + getDagTimeoutFieldBuilder() { + if (dagTimeoutBuilder_ == null) { + dagTimeoutBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, + com.google.protobuf.Duration.Builder, + com.google.protobuf.DurationOrBuilder>( + getDagTimeout(), getParentForChildren(), isClean()); + dagTimeout_ = null; + } + return dagTimeoutBuilder_; + } + + private com.google.protobuf.Timestamp dagStartTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + dagStartTimeBuilder_; + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagStartTime field is set. + */ + public boolean hasDagStartTime() { + return dagStartTimeBuilder_ != null || dagStartTime_ != null; + } + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagStartTime. + */ + public com.google.protobuf.Timestamp getDagStartTime() { + if (dagStartTimeBuilder_ == null) { + return dagStartTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : dagStartTime_; + } else { + return dagStartTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDagStartTime(com.google.protobuf.Timestamp value) { + if (dagStartTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + dagStartTime_ = value; + onChanged(); + } else { + dagStartTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDagStartTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (dagStartTimeBuilder_ == null) { + dagStartTime_ = builderForValue.build(); + onChanged(); + } else { + dagStartTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeDagStartTime(com.google.protobuf.Timestamp value) { + if (dagStartTimeBuilder_ == null) { + if (dagStartTime_ != null) { + dagStartTime_ = + com.google.protobuf.Timestamp.newBuilder(dagStartTime_) + .mergeFrom(value) + .buildPartial(); + } else { + dagStartTime_ = value; + } + onChanged(); + } else { + dagStartTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearDagStartTime() { + if (dagStartTimeBuilder_ == null) { + dagStartTime_ = null; + onChanged(); + } else { + dagStartTime_ = null; + dagStartTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getDagStartTimeBuilder() { + + onChanged(); + return getDagStartTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getDagStartTimeOrBuilder() { + if (dagStartTimeBuilder_ != null) { + return dagStartTimeBuilder_.getMessageOrBuilder(); + } else { + return dagStartTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : dagStartTime_; + } + } + /** + * + * + *
+     * Output only. DAG start time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG begins.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getDagStartTimeFieldBuilder() { + if (dagStartTimeBuilder_ == null) { + dagStartTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getDagStartTime(), getParentForChildren(), isClean()); + dagStartTime_ = null; + } + return dagStartTimeBuilder_; + } + + private com.google.protobuf.Timestamp dagEndTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + dagEndTimeBuilder_; + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagEndTime field is set. + */ + public boolean hasDagEndTime() { + return dagEndTimeBuilder_ != null || dagEndTime_ != null; + } + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagEndTime. + */ + public com.google.protobuf.Timestamp getDagEndTime() { + if (dagEndTimeBuilder_ == null) { + return dagEndTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : dagEndTime_; + } else { + return dagEndTimeBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDagEndTime(com.google.protobuf.Timestamp value) { + if (dagEndTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + dagEndTime_ = value; + onChanged(); + } else { + dagEndTimeBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder setDagEndTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (dagEndTimeBuilder_ == null) { + dagEndTime_ = builderForValue.build(); + onChanged(); + } else { + dagEndTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder mergeDagEndTime(com.google.protobuf.Timestamp value) { + if (dagEndTimeBuilder_ == null) { + if (dagEndTime_ != null) { + dagEndTime_ = + com.google.protobuf.Timestamp.newBuilder(dagEndTime_).mergeFrom(value).buildPartial(); + } else { + dagEndTime_ = value; + } + onChanged(); + } else { + dagEndTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public Builder clearDagEndTime() { + if (dagEndTimeBuilder_ == null) { + dagEndTime_ = null; + onChanged(); + } else { + dagEndTime_ = null; + dagEndTimeBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.Timestamp.Builder getDagEndTimeBuilder() { + + onChanged(); + return getDagEndTimeFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + public com.google.protobuf.TimestampOrBuilder getDagEndTimeOrBuilder() { + if (dagEndTimeBuilder_ != null) { + return dagEndTimeBuilder_.getMessageOrBuilder(); + } else { + return dagEndTime_ == null + ? com.google.protobuf.Timestamp.getDefaultInstance() + : dagEndTime_; + } + } + /** + * + * + *
+     * Output only. DAG end time, which is only set for workflows with
+     * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+     * when the DAG ends.
+     * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder> + getDagEndTimeFieldBuilder() { + if (dagEndTimeBuilder_ == null) { + dagEndTimeBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, + com.google.protobuf.Timestamp.Builder, + com.google.protobuf.TimestampOrBuilder>( + getDagEndTime(), getParentForChildren(), isClean()); + dagEndTime_ = null; + } + return dagEndTimeBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadataOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadataOrBuilder.java index 5b59db24..9630991a 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadataOrBuilder.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowMetadataOrBuilder.java @@ -407,4 +407,148 @@ public interface WorkflowMetadataOrBuilder * @return The bytes for clusterUuid. */ com.google.protobuf.ByteString getClusterUuidBytes(); + + /** + * + * + *
+   * Output only. The timeout duration for the DAG of jobs.
+   * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+   * as a
+   * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+   * For example, "1800" represents 1800 seconds (a 30-minute duration).
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagTimeout field is set. + */ + boolean hasDagTimeout(); + /** + * + * + *
+   * Output only. The timeout duration for the DAG of jobs.
+   * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+   * as a
+   * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+   * For example, "1800" represents 1800 seconds (a 30-minute duration).
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagTimeout. + */ + com.google.protobuf.Duration getDagTimeout(); + /** + * + * + *
+   * Output only. The timeout duration for the DAG of jobs.
+   * Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+   * as a
+   * [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping).
+   * For example, "1800" represents 1800 seconds (a 30-minute duration).
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.DurationOrBuilder getDagTimeoutOrBuilder(); + + /** + * + * + *
+   * Output only. DAG start time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG begins.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagStartTime field is set. + */ + boolean hasDagStartTime(); + /** + * + * + *
+   * Output only. DAG start time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG begins.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagStartTime. + */ + com.google.protobuf.Timestamp getDagStartTime(); + /** + * + * + *
+   * Output only. DAG start time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG begins.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_start_time = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getDagStartTimeOrBuilder(); + + /** + * + * + *
+   * Output only. DAG end time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG ends.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return Whether the dagEndTime field is set. + */ + boolean hasDagEndTime(); + /** + * + * + *
+   * Output only. DAG end time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG ends.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + * + * @return The dagEndTime. + */ + com.google.protobuf.Timestamp getDagEndTime(); + /** + * + * + *
+   * Output only. DAG end time, which is only set for workflows with
+   * [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout]
+   * when the DAG ends.
+   * 
+ * + * + * .google.protobuf.Timestamp dag_end_time = 14 [(.google.api.field_behavior) = OUTPUT_ONLY]; + * + */ + com.google.protobuf.TimestampOrBuilder getDagEndTimeOrBuilder(); } diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplate.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplate.java index d794731c..4907f90b 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplate.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplate.java @@ -177,6 +177,21 @@ private WorkflowTemplate( extensionRegistry)); break; } + case 82: + { + com.google.protobuf.Duration.Builder subBuilder = null; + if (dagTimeout_ != null) { + subBuilder = dagTimeout_.toBuilder(); + } + dagTimeout_ = + input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(dagTimeout_); + dagTimeout_ = subBuilder.buildPartial(); + } + + break; + } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { @@ -809,6 +824,79 @@ public com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder getParameter return parameters_.get(index); } + public static final int DAG_TIMEOUT_FIELD_NUMBER = 10; + private com.google.protobuf.Duration dagTimeout_; + /** + * + * + *
+   * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+   * and "d" suffixes for second, minute, hour, and day duration values,
+   * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+   * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+   * the workflow is running at the end of the timeout period, any remaining
+   * jobs are cancelled, the workflow is terminated, and if the workflow was
+   * running on a [managed
+   * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+   * the cluster is deleted.
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the dagTimeout field is set. + */ + @java.lang.Override + public boolean hasDagTimeout() { + return dagTimeout_ != null; + } + /** + * + * + *
+   * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+   * and "d" suffixes for second, minute, hour, and day duration values,
+   * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+   * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+   * the workflow is running at the end of the timeout period, any remaining
+   * jobs are cancelled, the workflow is terminated, and if the workflow was
+   * running on a [managed
+   * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+   * the cluster is deleted.
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The dagTimeout. + */ + @java.lang.Override + public com.google.protobuf.Duration getDagTimeout() { + return dagTimeout_ == null ? com.google.protobuf.Duration.getDefaultInstance() : dagTimeout_; + } + /** + * + * + *
+   * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+   * and "d" suffixes for second, minute, hour, and day duration values,
+   * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+   * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+   * the workflow is running at the end of the timeout period, any remaining
+   * jobs are cancelled, the workflow is terminated, and if the workflow was
+   * running on a [managed
+   * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+   * the cluster is deleted.
+   * 
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + @java.lang.Override + public com.google.protobuf.DurationOrBuilder getDagTimeoutOrBuilder() { + return getDagTimeout(); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -849,6 +937,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io for (int i = 0; i < parameters_.size(); i++) { output.writeMessage(9, parameters_.get(i)); } + if (dagTimeout_ != null) { + output.writeMessage(10, getDagTimeout()); + } unknownFields.writeTo(output); } @@ -892,6 +983,9 @@ public int getSerializedSize() { for (int i = 0; i < parameters_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, parameters_.get(i)); } + if (dagTimeout_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(10, getDagTimeout()); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -926,6 +1020,10 @@ public boolean equals(final java.lang.Object obj) { } if (!getJobsList().equals(other.getJobsList())) return false; if (!getParametersList().equals(other.getParametersList())) return false; + if (hasDagTimeout() != other.hasDagTimeout()) return false; + if (hasDagTimeout()) { + if (!getDagTimeout().equals(other.getDagTimeout())) return false; + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -967,6 +1065,10 @@ public int hashCode() { hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; hash = (53 * hash) + getParametersList().hashCode(); } + if (hasDagTimeout()) { + hash = (37 * hash) + DAG_TIMEOUT_FIELD_NUMBER; + hash = (53 * hash) + getDagTimeout().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1172,6 +1274,12 @@ public Builder clear() { } else { parametersBuilder_.clear(); } + if (dagTimeoutBuilder_ == null) { + dagTimeout_ = null; + } else { + dagTimeout_ = null; + dagTimeoutBuilder_ = null; + } return this; } @@ -1238,6 +1346,11 @@ public com.google.cloud.dataproc.v1beta2.WorkflowTemplate buildPartial() { } else { result.parameters_ = parametersBuilder_.build(); } + if (dagTimeoutBuilder_ == null) { + result.dagTimeout_ = dagTimeout_; + } else { + result.dagTimeout_ = dagTimeoutBuilder_.build(); + } onBuilt(); return result; } @@ -1363,6 +1476,9 @@ public Builder mergeFrom(com.google.cloud.dataproc.v1beta2.WorkflowTemplate othe } } } + if (other.hasDagTimeout()) { + mergeDagTimeout(other.getDagTimeout()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -3318,6 +3434,270 @@ public com.google.cloud.dataproc.v1beta2.TemplateParameter.Builder addParameters return parametersBuilder_; } + private com.google.protobuf.Duration dagTimeout_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, + com.google.protobuf.Duration.Builder, + com.google.protobuf.DurationOrBuilder> + dagTimeoutBuilder_; + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the dagTimeout field is set. + */ + public boolean hasDagTimeout() { + return dagTimeoutBuilder_ != null || dagTimeout_ != null; + } + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The dagTimeout. + */ + public com.google.protobuf.Duration getDagTimeout() { + if (dagTimeoutBuilder_ == null) { + return dagTimeout_ == null + ? com.google.protobuf.Duration.getDefaultInstance() + : dagTimeout_; + } else { + return dagTimeoutBuilder_.getMessage(); + } + } + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setDagTimeout(com.google.protobuf.Duration value) { + if (dagTimeoutBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + dagTimeout_ = value; + onChanged(); + } else { + dagTimeoutBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder setDagTimeout(com.google.protobuf.Duration.Builder builderForValue) { + if (dagTimeoutBuilder_ == null) { + dagTimeout_ = builderForValue.build(); + onChanged(); + } else { + dagTimeoutBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder mergeDagTimeout(com.google.protobuf.Duration value) { + if (dagTimeoutBuilder_ == null) { + if (dagTimeout_ != null) { + dagTimeout_ = + com.google.protobuf.Duration.newBuilder(dagTimeout_).mergeFrom(value).buildPartial(); + } else { + dagTimeout_ = value; + } + onChanged(); + } else { + dagTimeoutBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public Builder clearDagTimeout() { + if (dagTimeoutBuilder_ == null) { + dagTimeout_ = null; + onChanged(); + } else { + dagTimeout_ = null; + dagTimeoutBuilder_ = null; + } + + return this; + } + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.protobuf.Duration.Builder getDagTimeoutBuilder() { + + onChanged(); + return getDagTimeoutFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + public com.google.protobuf.DurationOrBuilder getDagTimeoutOrBuilder() { + if (dagTimeoutBuilder_ != null) { + return dagTimeoutBuilder_.getMessageOrBuilder(); + } else { + return dagTimeout_ == null + ? com.google.protobuf.Duration.getDefaultInstance() + : dagTimeout_; + } + } + /** + * + * + *
+     * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+     * and "d" suffixes for second, minute, hour, and day duration values,
+     * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+     * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+     * the workflow is running at the end of the timeout period, any remaining
+     * jobs are cancelled, the workflow is terminated, and if the workflow was
+     * running on a [managed
+     * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+     * the cluster is deleted.
+     * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, + com.google.protobuf.Duration.Builder, + com.google.protobuf.DurationOrBuilder> + getDagTimeoutFieldBuilder() { + if (dagTimeoutBuilder_ == null) { + dagTimeoutBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, + com.google.protobuf.Duration.Builder, + com.google.protobuf.DurationOrBuilder>( + getDagTimeout(), getParentForChildren(), isClean()); + dagTimeout_ = null; + } + return dagTimeoutBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateName.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateName.java index 77b3526a..172c2420 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateName.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,29 +26,48 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java") public class WorkflowTemplateName implements ResourceName { - - @Deprecated - protected WorkflowTemplateName() {} - - private static final PathTemplate PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE = + private static final PathTemplate PROJECT_REGION_WORKFLOW_TEMPLATE = PathTemplate.createWithoutUrlEncoding( "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}"); - private static final PathTemplate PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE = + private static final PathTemplate PROJECT_LOCATION_WORKFLOW_TEMPLATE = PathTemplate.createWithoutUrlEncoding( "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}"); - private volatile Map fieldValuesMap; private PathTemplate pathTemplate; private String fixedValue; + private final String project; + private final String region; + private final String workflowTemplate; + private final String location; + + @Deprecated + protected WorkflowTemplateName() { + project = null; + region = null; + workflowTemplate = null; + location = null; + } + + private WorkflowTemplateName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + region = Preconditions.checkNotNull(builder.getRegion()); + workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate()); + location = null; + pathTemplate = PROJECT_REGION_WORKFLOW_TEMPLATE; + } - private String project; - private String region; - private String workflowTemplate; - private String location; + private WorkflowTemplateName(ProjectLocationWorkflowTemplateBuilder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate()); + region = null; + pathTemplate = PROJECT_LOCATION_WORKFLOW_TEMPLATE; + } public String getProject() { return project; @@ -66,20 +85,6 @@ public String getLocation() { return location; } - private WorkflowTemplateName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - region = Preconditions.checkNotNull(builder.getRegion()); - workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate()); - pathTemplate = PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE; - } - - private WorkflowTemplateName(ProjectLocationWorkflowTemplateBuilder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - workflowTemplate = Preconditions.checkNotNull(builder.getWorkflowTemplate()); - pathTemplate = PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE; - } - public static Builder newBuilder() { return new Builder(); } @@ -99,7 +104,7 @@ public Builder toBuilder() { } public static WorkflowTemplateName of(String project, String region, String workflowTemplate) { - return newProjectRegionWorkflowTemplateBuilder() + return newBuilder() .setProject(project) .setRegion(region) .setWorkflowTemplate(workflowTemplate) @@ -109,7 +114,7 @@ public static WorkflowTemplateName of(String project, String region, String work @BetaApi("The static create methods are not stable yet and may be changed in the future.") public static WorkflowTemplateName ofProjectRegionWorkflowTemplateName( String project, String region, String workflowTemplate) { - return newProjectRegionWorkflowTemplateBuilder() + return newBuilder() .setProject(project) .setRegion(region) .setWorkflowTemplate(workflowTemplate) @@ -161,18 +166,17 @@ public static 
WorkflowTemplateName parse(String formattedString) {
     if (formattedString.isEmpty()) {
       return null;
     }
-    if (PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.matches(formattedString)) {
-      Map<String, String> matchMap =
-          PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.match(formattedString);
+    if (PROJECT_REGION_WORKFLOW_TEMPLATE.matches(formattedString)) {
+      Map<String, String> matchMap = PROJECT_REGION_WORKFLOW_TEMPLATE.match(formattedString);
       return ofProjectRegionWorkflowTemplateName(
           matchMap.get("project"), matchMap.get("region"), matchMap.get("workflow_template"));
-    } else if (PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.matches(formattedString)) {
-      Map<String, String> matchMap =
-          PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.match(formattedString);
+    } else if (PROJECT_LOCATION_WORKFLOW_TEMPLATE.matches(formattedString)) {
+      Map<String, String> matchMap = PROJECT_LOCATION_WORKFLOW_TEMPLATE.match(formattedString);
       return ofProjectLocationWorkflowTemplateName(
           matchMap.get("project"), matchMap.get("location"), matchMap.get("workflow_template"));
     }
-    throw new ValidationException("JobName.parse: formattedString not in valid format");
+    throw new ValidationException(
+        "WorkflowTemplateName.parse: formattedString not in valid format");
   }
 
   public static List<WorkflowTemplateName> parseList(List<String> formattedStrings) {
@@ -196,8 +200,8 @@ public static List<String> toStringList(List<WorkflowTemplateName> values) {
   }
 
   public static boolean isParsableFrom(String formattedString) {
-    return PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.matches(formattedString)
-        || PROJECT_LOCATION_WORKFLOW_TEMPLATE_PATH_TEMPLATE.matches(formattedString);
+    return PROJECT_REGION_WORKFLOW_TEMPLATE.matches(formattedString)
+        || PROJECT_LOCATION_WORKFLOW_TEMPLATE.matches(formattedString);
   }
 
   @Override
@@ -234,9 +238,39 @@ public String toString() {
     return fixedValue != null ? fixedValue : pathTemplate.instantiate(getFieldValuesMap());
   }
 
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (o != null && getClass() == o.getClass()) {
+      WorkflowTemplateName that = ((WorkflowTemplateName) o);
+      return Objects.equals(this.project, that.project)
+          && Objects.equals(this.region, that.region)
+          && Objects.equals(this.workflowTemplate, that.workflowTemplate)
+          && Objects.equals(this.location, that.location);
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    int h = 1;
+    h *= 1000003;
+    h ^= Objects.hashCode(fixedValue);
+    h *= 1000003;
+    h ^= Objects.hashCode(project);
+    h *= 1000003;
+    h ^= Objects.hashCode(region);
+    h *= 1000003;
+    h ^= Objects.hashCode(workflowTemplate);
+    h *= 1000003;
+    h ^= Objects.hashCode(location);
+    return h;
+  }
+
   /** Builder for projects/{project}/regions/{region}/workflowTemplates/{workflow_template}.
*/ public static class Builder { - private String project; private String region; private String workflowTemplate; @@ -272,9 +306,8 @@ public Builder setWorkflowTemplate(String workflowTemplate) { private Builder(WorkflowTemplateName workflowTemplateName) { Preconditions.checkArgument( - workflowTemplateName.pathTemplate == PROJECT_REGION_WORKFLOW_TEMPLATE_PATH_TEMPLATE, - "toBuilder is only supported when WorkflowTemplateName has the pattern of " - + "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}."); + Objects.equals(workflowTemplateName.pathTemplate, PROJECT_REGION_WORKFLOW_TEMPLATE), + "toBuilder is only supported when WorkflowTemplateName has the pattern of projects/{project}/regions/{region}/workflowTemplates/{workflow_template}"); project = workflowTemplateName.project; region = workflowTemplateName.region; workflowTemplate = workflowTemplateName.workflowTemplate; @@ -288,12 +321,11 @@ public WorkflowTemplateName build() { /** Builder for projects/{project}/locations/{location}/workflowTemplates/{workflow_template}. */ @BetaApi("The per-pattern Builders are not stable yet and may be changed in the future.") public static class ProjectLocationWorkflowTemplateBuilder { - private String project; private String location; private String workflowTemplate; - private ProjectLocationWorkflowTemplateBuilder() {} + protected ProjectLocationWorkflowTemplateBuilder() {} public String getProject() { return project; @@ -326,35 +358,4 @@ public WorkflowTemplateName build() { return new WorkflowTemplateName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || getClass() == o.getClass()) { - WorkflowTemplateName that = (WorkflowTemplateName) o; - return (Objects.equals(this.project, that.project)) - && (Objects.equals(this.region, that.region)) - && (Objects.equals(this.workflowTemplate, that.workflowTemplate)) - && (Objects.equals(this.location, that.location)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(fixedValue); - h *= 1000003; - h ^= Objects.hashCode(project); - h *= 1000003; - h ^= Objects.hashCode(region); - h *= 1000003; - h ^= Objects.hashCode(workflowTemplate); - h *= 1000003; - h ^= Objects.hashCode(location); - return h; - } } diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateOrBuilder.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateOrBuilder.java index 51ed9353..82a75da7 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateOrBuilder.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplateOrBuilder.java @@ -443,4 +443,66 @@ public interface WorkflowTemplateOrBuilder * */ com.google.cloud.dataproc.v1beta2.TemplateParameterOrBuilder getParametersOrBuilder(int index); + + /** + * + * + *
+   * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+   * and "d" suffixes for second, minute, hour, and day duration values,
+   * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+   * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+   * the workflow is running at the end of the timeout period, any remaining
+   * jobs are cancelled, the workflow is terminated, and if the workflow was
+   * running on a [managed
+   * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+   * the cluster is deleted.
+   * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return Whether the dagTimeout field is set. + */ + boolean hasDagTimeout(); + /** + * + * + *
+   * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+   * and "d" suffixes for second, minute, hour, and day duration values,
+   * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+   * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+   * the workflow is running at the end of the timeout period, any remaining
+   * jobs are cancelled, the workflow is terminated, and if the workflow was
+   * running on a [managed
+   * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+   * the cluster is deleted.
+   * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + * + * @return The dagTimeout. + */ + com.google.protobuf.Duration getDagTimeout(); + /** + * + * + *
+   * Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+   * and "d" suffixes for second, minute, hour, and day duration values,
+   * respectively. The timeout duration must be from 10 minutes ("10m") to 24
+   * hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+   * the workflow is running at the end of the timeout period, any remaining
+   * jobs are cancelled, the workflow is terminated, and if the workflow was
+   * running on a [managed
+   * cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+   * the cluster is deleted.
+   * </pre>
+ * + * .google.protobuf.Duration dag_timeout = 10 [(.google.api.field_behavior) = OPTIONAL]; + * + */ + com.google.protobuf.DurationOrBuilder getDagTimeoutOrBuilder(); } diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplatesProto.java b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplatesProto.java index c9429838..f2c2967d 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplatesProto.java +++ b/proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/WorkflowTemplatesProto.java @@ -152,194 +152,201 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "to\032,google/cloud/dataproc/v1beta2/cluste" + "rs.proto\032(google/cloud/dataproc/v1beta2/" + "jobs.proto\032#google/longrunning/operation" - + "s.proto\032\033google/protobuf/empty.proto\032\037go" - + "ogle/protobuf/timestamp.proto\"\327\005\n\020Workfl" - + "owTemplate\022\017\n\002id\030\002 \001(\tB\003\340A\002\022\021\n\004name\030\001 \001(" - + "\tB\003\340A\003\022\024\n\007version\030\003 \001(\005B\003\340A\001\0224\n\013create_t" - + "ime\030\004 \001(\0132\032.google.protobuf.TimestampB\003\340" - + "A\003\0224\n\013update_time\030\005 \001(\0132\032.google.protobu" - + "f.TimestampB\003\340A\003\022P\n\006labels\030\006 \003(\0132;.googl" - + "e.cloud.dataproc.v1beta2.WorkflowTemplat" - + "e.LabelsEntryB\003\340A\001\022K\n\tplacement\030\007 \001(\01328." - + "google.cloud.dataproc.v1beta2.WorkflowTe" - + "mplatePlacement\0227\n\004jobs\030\010 \003(\0132).google.c" - + "loud.dataproc.v1beta2.OrderedJob\022I\n\npara" - + "meters\030\t \003(\01320.google.cloud.dataproc.v1b" - + "eta2.TemplateParameterB\003\340A\001\032-\n\013LabelsEnt" - + "ry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:\312\001\352A\306" - + "\001\n(dataproc.googleapis.com/WorkflowTempl" - + "ate\022Iprojects/{project}/regions/{region}" - + "/workflowTemplates/{workflow_template}\022M" - + "projects/{project}/locations/{location}/" - + "workflowTemplates/{workflow_template} \001\"" - + "\276\001\n\031WorkflowTemplatePlacement\022H\n\017managed" - + "_cluster\030\001 \001(\0132-.google.cloud.dataproc.v" - + "1beta2.ManagedClusterH\000\022J\n\020cluster_selec" - + "tor\030\002 \001(\0132..google.cloud.dataproc.v1beta" - + "2.ClusterSelectorH\000B\013\n\tplacement\"\336\001\n\016Man" - + "agedCluster\022\024\n\014cluster_name\030\002 \001(\t\022<\n\006con" - + "fig\030\003 \001(\0132,.google.cloud.dataproc.v1beta" - + "2.ClusterConfig\022I\n\006labels\030\004 \003(\01329.google" - + ".cloud.dataproc.v1beta2.ManagedCluster.L" - + "abelsEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r" - + "\n\005value\030\002 \001(\t:\0028\001\"\260\001\n\017ClusterSelector\022\014\n" - + "\004zone\030\001 \001(\t\022Y\n\016cluster_labels\030\002 \003(\0132A.go" - + "ogle.cloud.dataproc.v1beta2.ClusterSelec" - + "tor.ClusterLabelsEntry\0324\n\022ClusterLabelsE" - + "ntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\220\006\n" - + "\nOrderedJob\022\024\n\007step_id\030\001 \001(\tB\003\340A\002\022>\n\nhad" - + "oop_job\030\002 \001(\0132(.google.cloud.dataproc.v1" - + "beta2.HadoopJobH\000\022<\n\tspark_job\030\003 \001(\0132\'.g" - + "oogle.cloud.dataproc.v1beta2.SparkJobH\000\022" - + "@\n\013pyspark_job\030\004 \001(\0132).google.cloud.data" - + 
"proc.v1beta2.PySparkJobH\000\022:\n\010hive_job\030\005 " - + "\001(\0132&.google.cloud.dataproc.v1beta2.Hive" - + "JobH\000\0228\n\007pig_job\030\006 \001(\0132%.google.cloud.da" - + "taproc.v1beta2.PigJobH\000\022?\n\013spark_r_job\030\013" - + " \001(\0132(.google.cloud.dataproc.v1beta2.Spa" - + "rkRJobH\000\022C\n\rspark_sql_job\030\007 \001(\0132*.google" - + ".cloud.dataproc.v1beta2.SparkSqlJobH\000\022>\n" + + "s.proto\032\036google/protobuf/duration.proto\032" + + "\033google/protobuf/empty.proto\032\037google/pro" + + "tobuf/timestamp.proto\"\214\006\n\020WorkflowTempla" + + "te\022\017\n\002id\030\002 \001(\tB\003\340A\002\022\021\n\004name\030\001 \001(\tB\003\340A\003\022\024" + + "\n\007version\030\003 \001(\005B\003\340A\001\0224\n\013create_time\030\004 \001(" + + "\0132\032.google.protobuf.TimestampB\003\340A\003\0224\n\013up" + + "date_time\030\005 \001(\0132\032.google.protobuf.Timest" + + "ampB\003\340A\003\022P\n\006labels\030\006 \003(\0132;.google.cloud." + + "dataproc.v1beta2.WorkflowTemplate.Labels" + + "EntryB\003\340A\001\022K\n\tplacement\030\007 \001(\01328.google.c" + + "loud.dataproc.v1beta2.WorkflowTemplatePl" + + "acement\0227\n\004jobs\030\010 \003(\0132).google.cloud.dat" + + "aproc.v1beta2.OrderedJob\022I\n\nparameters\030\t" + + " \003(\01320.google.cloud.dataproc.v1beta2.Tem" + + "plateParameterB\003\340A\001\0223\n\013dag_timeout\030\n \001(\013" + + "2\031.google.protobuf.DurationB\003\340A\001\032-\n\013Labe" + + "lsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001:" + + "\312\001\352A\306\001\n(dataproc.googleapis.com/Workflow" + + "Template\022Iprojects/{project}/regions/{re" + + "gion}/workflowTemplates/{workflow_templa" + + "te}\022Mprojects/{project}/locations/{locat" + + "ion}/workflowTemplates/{workflow_templat" + + "e} \001\"\276\001\n\031WorkflowTemplatePlacement\022H\n\017ma" + + "naged_cluster\030\001 \001(\0132-.google.cloud.datap" + + "roc.v1beta2.ManagedClusterH\000\022J\n\020cluster_" + + "selector\030\002 \001(\0132..google.cloud.dataproc.v" + + "1beta2.ClusterSelectorH\000B\013\n\tplacement\"\336\001" + + "\n\016ManagedCluster\022\024\n\014cluster_name\030\002 \001(\t\022<" + + "\n\006config\030\003 \001(\0132,.google.cloud.dataproc.v" + + "1beta2.ClusterConfig\022I\n\006labels\030\004 \003(\01329.g" + + "oogle.cloud.dataproc.v1beta2.ManagedClus" + + "ter.LabelsEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001 " + + "\001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\260\001\n\017ClusterSelect" + + "or\022\014\n\004zone\030\001 \001(\t\022Y\n\016cluster_labels\030\002 \003(\013" + + "2A.google.cloud.dataproc.v1beta2.Cluster" + + "Selector.ClusterLabelsEntry\0324\n\022ClusterLa" + + "belsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028" + + "\001\"\270\006\n\nOrderedJob\022\024\n\007step_id\030\001 \001(\tB\003\340A\002\022C" + + "\n\nhadoop_job\030\002 \001(\0132(.google.cloud.datapr" + + "oc.v1beta2.HadoopJobB\003\340A\001H\000\022A\n\tspark_job" + + "\030\003 \001(\0132\'.google.cloud.dataproc.v1beta2.S" + + "parkJobB\003\340A\001H\000\022E\n\013pyspark_job\030\004 \001(\0132).go" + + "ogle.cloud.dataproc.v1beta2.PySparkJobB\003" + + "\340A\001H\000\022?\n\010hive_job\030\005 \001(\0132&.google.cloud.d" + + "ataproc.v1beta2.HiveJobB\003\340A\001H\000\022=\n\007pig_jo" + + "b\030\006 \001(\0132%.google.cloud.dataproc.v1beta2." 
+ + "PigJobB\003\340A\001H\000\022D\n\013spark_r_job\030\013 \001(\0132(.goo" + + "gle.cloud.dataproc.v1beta2.SparkRJobB\003\340A" + + "\001H\000\022H\n\rspark_sql_job\030\007 \001(\0132*.google.clou" + + "d.dataproc.v1beta2.SparkSqlJobB\003\340A\001H\000\022C\n" + "\npresto_job\030\014 \001(\0132(.google.cloud.datapro" - + "c.v1beta2.PrestoJobH\000\022J\n\006labels\030\010 \003(\01325." - + "google.cloud.dataproc.v1beta2.OrderedJob" - + ".LabelsEntryB\003\340A\001\022E\n\nscheduling\030\t \001(\0132,." - + "google.cloud.dataproc.v1beta2.JobSchedul" - + "ingB\003\340A\001\022\"\n\025prerequisite_step_ids\030\n \003(\tB" - + "\003\340A\001\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005valu" - + "e\030\002 \001(\t:\0028\001B\n\n\010job_type\"\216\001\n\021TemplatePara" - + "meter\022\014\n\004name\030\001 \001(\t\022\016\n\006fields\030\002 \003(\t\022\023\n\013d" - + "escription\030\003 \001(\t\022F\n\nvalidation\030\004 \001(\01322.g" - + "oogle.cloud.dataproc.v1beta2.ParameterVa" - + "lidation\"\253\001\n\023ParameterValidation\022?\n\005rege" - + "x\030\001 \001(\0132..google.cloud.dataproc.v1beta2." - + "RegexValidationH\000\022@\n\006values\030\002 \001(\0132..goog" - + "le.cloud.dataproc.v1beta2.ValueValidatio" - + "nH\000B\021\n\017validation_type\"\"\n\017RegexValidatio" - + "n\022\017\n\007regexes\030\001 \003(\t\"!\n\017ValueValidation\022\016\n" - + "\006values\030\001 \003(\t\"\310\005\n\020WorkflowMetadata\022\025\n\010te" - + "mplate\030\001 \001(\tB\003\340A\003\022\024\n\007version\030\002 \001(\005B\003\340A\003\022" - + "L\n\016create_cluster\030\003 \001(\0132/.google.cloud.d" - + "ataproc.v1beta2.ClusterOperationB\003\340A\003\022@\n" - + "\005graph\030\004 \001(\0132,.google.cloud.dataproc.v1b" - + "eta2.WorkflowGraphB\003\340A\003\022L\n\016delete_cluste" - + "r\030\005 \001(\0132/.google.cloud.dataproc.v1beta2." - + "ClusterOperationB\003\340A\003\022I\n\005state\030\006 \001(\01625.g" - + "oogle.cloud.dataproc.v1beta2.WorkflowMet" - + "adata.StateB\003\340A\003\022\031\n\014cluster_name\030\007 \001(\tB\003" - + "\340A\003\022S\n\nparameters\030\010 \003(\0132?.google.cloud.d" - + "ataproc.v1beta2.WorkflowMetadata.Paramet" - + "ersEntry\0223\n\nstart_time\030\t \001(\0132\032.google.pr" - + "otobuf.TimestampB\003\340A\003\0221\n\010end_time\030\n \001(\0132" - + "\032.google.protobuf.TimestampB\003\340A\003\022\031\n\014clus" - + "ter_uuid\030\013 \001(\tB\003\340A\003\0321\n\017ParametersEntry\022\013" - + "\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"8\n\005State\022" - + "\013\n\007UNKNOWN\020\000\022\013\n\007PENDING\020\001\022\013\n\007RUNNING\020\002\022\010" - + "\n\004DONE\020\003\"T\n\020ClusterOperation\022\031\n\014operatio" - + "n_id\030\001 \001(\tB\003\340A\003\022\022\n\005error\030\002 \001(\tB\003\340A\003\022\021\n\004d" - + "one\030\003 \001(\010B\003\340A\003\"P\n\rWorkflowGraph\022?\n\005nodes" - + "\030\001 \003(\0132+.google.cloud.dataproc.v1beta2.W" - + "orkflowNodeB\003\340A\003\"\251\002\n\014WorkflowNode\022\024\n\007ste" - + "p_id\030\001 \001(\tB\003\340A\003\022\"\n\025prerequisite_step_ids" - + "\030\002 \003(\tB\003\340A\003\022\023\n\006job_id\030\003 \001(\tB\003\340A\003\022I\n\005stat" - + "e\030\005 \001(\01625.google.cloud.dataproc.v1beta2." 
- + "WorkflowNode.NodeStateB\003\340A\003\022\022\n\005error\030\006 \001" - + "(\tB\003\340A\003\"k\n\tNodeState\022\033\n\027NODE_STATUS_UNSP" - + "ECIFIED\020\000\022\013\n\007BLOCKED\020\001\022\014\n\010RUNNABLE\020\002\022\013\n\007" - + "RUNNING\020\003\022\r\n\tCOMPLETED\020\004\022\n\n\006FAILED\020\005\"\251\001\n" - + "\035CreateWorkflowTemplateRequest\022@\n\006parent" - + "\030\001 \001(\tB0\340A\002\372A*\022(dataproc.googleapis.com/" - + "WorkflowTemplate\022F\n\010template\030\002 \001(\0132/.goo" - + "gle.cloud.dataproc.v1beta2.WorkflowTempl" - + "ateB\003\340A\002\"m\n\032GetWorkflowTemplateRequest\022>" - + "\n\004name\030\001 \001(\tB0\340A\002\372A*\n(dataproc.googleapi" - + "s.com/WorkflowTemplate\022\017\n\007version\030\002 \001(\005\"" - + "\274\002\n\"InstantiateWorkflowTemplateRequest\022>" - + "\n\004name\030\001 \001(\tB0\340A\002\372A*\n(dataproc.googleapi" - + "s.com/WorkflowTemplate\022\017\n\007version\030\002 \001(\005\022" - + "\027\n\013instance_id\030\003 \001(\tB\002\030\001\022\022\n\nrequest_id\030\005" - + " \001(\t\022e\n\nparameters\030\004 \003(\0132Q.google.cloud." - + "dataproc.v1beta2.InstantiateWorkflowTemp" - + "lateRequest.ParametersEntry\0321\n\017Parameter" - + "sEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\335" - + "\001\n(InstantiateInlineWorkflowTemplateRequ" - + "est\022@\n\006parent\030\001 \001(\tB0\340A\002\372A*\022(dataproc.go" - + "ogleapis.com/WorkflowTemplate\022F\n\010templat" - + "e\030\002 \001(\0132/.google.cloud.dataproc.v1beta2." - + "WorkflowTemplateB\003\340A\002\022\023\n\013instance_id\030\003 \001" - + "(\t\022\022\n\nrequest_id\030\004 \001(\t\"g\n\035UpdateWorkflow" - + "TemplateRequest\022F\n\010template\030\001 \001(\0132/.goog" - + "le.cloud.dataproc.v1beta2.WorkflowTempla" - + "teB\003\340A\002\"\207\001\n\034ListWorkflowTemplatesRequest" + + "c.v1beta2.PrestoJobB\003\340A\001H\000\022J\n\006labels\030\010 \003" + + "(\01325.google.cloud.dataproc.v1beta2.Order" + + "edJob.LabelsEntryB\003\340A\001\022E\n\nscheduling\030\t \001" + + "(\0132,.google.cloud.dataproc.v1beta2.JobSc" + + "hedulingB\003\340A\001\022\"\n\025prerequisite_step_ids\030\n" + + " \003(\tB\003\340A\001\032-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n" + + "\005value\030\002 \001(\t:\0028\001B\n\n\010job_type\"\216\001\n\021Templat" + + "eParameter\022\014\n\004name\030\001 \001(\t\022\016\n\006fields\030\002 \003(\t" + + "\022\023\n\013description\030\003 \001(\t\022F\n\nvalidation\030\004 \001(" + + "\01322.google.cloud.dataproc.v1beta2.Parame" + + "terValidation\"\253\001\n\023ParameterValidation\022?\n" + + "\005regex\030\001 \001(\0132..google.cloud.dataproc.v1b" + + "eta2.RegexValidationH\000\022@\n\006values\030\002 \001(\0132." 
+ + ".google.cloud.dataproc.v1beta2.ValueVali" + + "dationH\000B\021\n\017validation_type\"\"\n\017RegexVali" + + "dation\022\017\n\007regexes\030\001 \003(\t\"!\n\017ValueValidati" + + "on\022\016\n\006values\030\001 \003(\t\"\355\006\n\020WorkflowMetadata\022" + + "\025\n\010template\030\001 \001(\tB\003\340A\003\022\024\n\007version\030\002 \001(\005B" + + "\003\340A\003\022L\n\016create_cluster\030\003 \001(\0132/.google.cl" + + "oud.dataproc.v1beta2.ClusterOperationB\003\340" + + "A\003\022@\n\005graph\030\004 \001(\0132,.google.cloud.datapro" + + "c.v1beta2.WorkflowGraphB\003\340A\003\022L\n\016delete_c" + + "luster\030\005 \001(\0132/.google.cloud.dataproc.v1b" + + "eta2.ClusterOperationB\003\340A\003\022I\n\005state\030\006 \001(" + + "\01625.google.cloud.dataproc.v1beta2.Workfl" + + "owMetadata.StateB\003\340A\003\022\031\n\014cluster_name\030\007 " + + "\001(\tB\003\340A\003\022S\n\nparameters\030\010 \003(\0132?.google.cl" + + "oud.dataproc.v1beta2.WorkflowMetadata.Pa" + + "rametersEntry\0223\n\nstart_time\030\t \001(\0132\032.goog" + + "le.protobuf.TimestampB\003\340A\003\0221\n\010end_time\030\n" + + " \001(\0132\032.google.protobuf.TimestampB\003\340A\003\022\031\n" + + "\014cluster_uuid\030\013 \001(\tB\003\340A\003\0223\n\013dag_timeout\030" + + "\014 \001(\0132\031.google.protobuf.DurationB\003\340A\003\0227\n" + + "\016dag_start_time\030\r \001(\0132\032.google.protobuf." + + "TimestampB\003\340A\003\0225\n\014dag_end_time\030\016 \001(\0132\032.g" + + "oogle.protobuf.TimestampB\003\340A\003\0321\n\017Paramet" + + "ersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001" + + "\"8\n\005State\022\013\n\007UNKNOWN\020\000\022\013\n\007PENDING\020\001\022\013\n\007R" + + "UNNING\020\002\022\010\n\004DONE\020\003\"T\n\020ClusterOperation\022\031" + + "\n\014operation_id\030\001 \001(\tB\003\340A\003\022\022\n\005error\030\002 \001(\t" + + "B\003\340A\003\022\021\n\004done\030\003 \001(\010B\003\340A\003\"P\n\rWorkflowGrap" + + "h\022?\n\005nodes\030\001 \003(\0132+.google.cloud.dataproc" + + ".v1beta2.WorkflowNodeB\003\340A\003\"\251\002\n\014WorkflowN" + + "ode\022\024\n\007step_id\030\001 \001(\tB\003\340A\003\022\"\n\025prerequisit" + + "e_step_ids\030\002 \003(\tB\003\340A\003\022\023\n\006job_id\030\003 \001(\tB\003\340" + + "A\003\022I\n\005state\030\005 \001(\01625.google.cloud.datapro" + + "c.v1beta2.WorkflowNode.NodeStateB\003\340A\003\022\022\n" + + "\005error\030\006 \001(\tB\003\340A\003\"k\n\tNodeState\022\033\n\027NODE_S" + + "TATUS_UNSPECIFIED\020\000\022\013\n\007BLOCKED\020\001\022\014\n\010RUNN" + + "ABLE\020\002\022\013\n\007RUNNING\020\003\022\r\n\tCOMPLETED\020\004\022\n\n\006FA" + + "ILED\020\005\"\251\001\n\035CreateWorkflowTemplateRequest" + "\022@\n\006parent\030\001 \001(\tB0\340A\002\372A*\022(dataproc.googl" - + "eapis.com/WorkflowTemplate\022\021\n\tpage_size\030" - + "\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"\206\001\n\035ListWorkfl" - + "owTemplatesResponse\022G\n\ttemplates\030\001 \003(\0132/" - + ".google.cloud.dataproc.v1beta2.WorkflowT" - + "emplateB\003\340A\003\022\034\n\017next_page_token\030\002 \001(\tB\003\340" - + "A\003\"p\n\035DeleteWorkflowTemplateRequest\022>\n\004n" - + "ame\030\001 \001(\tB0\340A\002\372A*\n(dataproc.googleapis.c" - + "om/WorkflowTemplate\022\017\n\007version\030\002 \001(\0052\351\021\n" - + "\027WorkflowTemplateService\022\260\002\n\026CreateWorkf" - + "lowTemplate\022<.google.cloud.dataproc.v1be" - + "ta2.CreateWorkflowTemplateRequest\032/.goog" - + 
"le.cloud.dataproc.v1beta2.WorkflowTempla" - + "te\"\246\001\202\323\344\223\002\214\001\"8/v1beta2/{parent=projects/" - + "*/regions/*}/workflowTemplates:\010template" - + "ZF\":/v1beta2/{parent=projects/*/location" - + "s/*}/workflowTemplates:\010template\332A\020paren" - + "t, template\022\211\002\n\023GetWorkflowTemplate\0229.go" - + "ogle.cloud.dataproc.v1beta2.GetWorkflowT" - + "emplateRequest\032/.google.cloud.dataproc.v" - + "1beta2.WorkflowTemplate\"\205\001\202\323\344\223\002x\0228/v1bet" - + "a2/{name=projects/*/regions/*/workflowTe" - + "mplates/*}Z<\022:/v1beta2/{name=projects/*/" - + "locations/*/workflowTemplates/*}\332A\004name\022" - + "\345\002\n\033InstantiateWorkflowTemplate\022A.google" - + ".cloud.dataproc.v1beta2.InstantiateWorkf" - + "lowTemplateRequest\032\035.google.longrunning." - + "Operation\"\343\001\202\323\344\223\002\226\001\"D/v1beta2/{name=proj" - + "ects/*/regions/*/workflowTemplates/*}:in" - + "stantiate:\001*ZK\"F/v1beta2/{name=projects/" - + "*/locations/*/workflowTemplates/*}:insta" - + "ntiate:\001*\332A\004name\332A\020name, parameters\312A)\n\025" - + "google.protobuf.Empty\022\020WorkflowMetadata\022" - + "\204\003\n!InstantiateInlineWorkflowTemplate\022G." - + "google.cloud.dataproc.v1beta2.Instantiat" - + "eInlineWorkflowTemplateRequest\032\035.google." - + "longrunning.Operation\"\366\001\202\323\344\223\002\260\001\"L/v1beta" - + "2/{parent=projects/*/locations/*}/workfl" - + "owTemplates:instantiateInline:\010templateZ" - + "V\"J/v1beta2/{parent=projects/*/regions/*" - + "}/workflowTemplates:instantiateInline:\010t" - + "emplate\332A\020parent, template\312A)\n\025google.pr" - + "otobuf.Empty\022\020WorkflowMetadata\022\272\002\n\026Updat" - + "eWorkflowTemplate\022<.google.cloud.datapro" - + "c.v1beta2.UpdateWorkflowTemplateRequest\032" - + "/.google.cloud.dataproc.v1beta2.Workflow" - + "Template\"\260\001\202\323\344\223\002\236\001\032A/v1beta2/{template.n" - + "ame=projects/*/regions/*/workflowTemplat" - + "es/*}:\010templateZO\032C/v1beta2/{template.na" + + "eapis.com/WorkflowTemplate\022F\n\010template\030\002" + + " \001(\0132/.google.cloud.dataproc.v1beta2.Wor" + + "kflowTemplateB\003\340A\002\"m\n\032GetWorkflowTemplat" + + "eRequest\022>\n\004name\030\001 \001(\tB0\340A\002\372A*\n(dataproc" + + ".googleapis.com/WorkflowTemplate\022\017\n\007vers" + + "ion\030\002 \001(\005\"\274\002\n\"InstantiateWorkflowTemplat" + + "eRequest\022>\n\004name\030\001 \001(\tB0\340A\002\372A*\n(dataproc" + + ".googleapis.com/WorkflowTemplate\022\017\n\007vers" + + "ion\030\002 \001(\005\022\027\n\013instance_id\030\003 \001(\tB\002\030\001\022\022\n\nre" + + "quest_id\030\005 \001(\t\022e\n\nparameters\030\004 \003(\0132Q.goo" + + "gle.cloud.dataproc.v1beta2.InstantiateWo" + + "rkflowTemplateRequest.ParametersEntry\0321\n" + + "\017ParametersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002" + + " \001(\t:\0028\001\"\335\001\n(InstantiateInlineWorkflowTe" + + "mplateRequest\022@\n\006parent\030\001 \001(\tB0\340A\002\372A*\022(d" + + "ataproc.googleapis.com/WorkflowTemplate\022" + + "F\n\010template\030\002 \001(\0132/.google.cloud.datapro" + + "c.v1beta2.WorkflowTemplateB\003\340A\002\022\023\n\013insta" + + "nce_id\030\003 \001(\t\022\022\n\nrequest_id\030\004 \001(\t\"g\n\035Upda" + + "teWorkflowTemplateRequest\022F\n\010template\030\001 " + + "\001(\0132/.google.cloud.dataproc.v1beta2.Work" + + "flowTemplateB\003\340A\002\"\207\001\n\034ListWorkflowTempla" + + "tesRequest\022@\n\006parent\030\001 \001(\tB0\340A\002\372A*\022(data" 
+ + "proc.googleapis.com/WorkflowTemplate\022\021\n\t" + + "page_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"\206\001\n\035" + + "ListWorkflowTemplatesResponse\022G\n\ttemplat" + + "es\030\001 \003(\0132/.google.cloud.dataproc.v1beta2" + + ".WorkflowTemplateB\003\340A\003\022\034\n\017next_page_toke" + + "n\030\002 \001(\tB\003\340A\003\"p\n\035DeleteWorkflowTemplateRe" + + "quest\022>\n\004name\030\001 \001(\tB0\340A\002\372A*\n(dataproc.go" + + "ogleapis.com/WorkflowTemplate\022\017\n\007version" + + "\030\002 \001(\0052\351\021\n\027WorkflowTemplateService\022\260\002\n\026C" + + "reateWorkflowTemplate\022<.google.cloud.dat" + + "aproc.v1beta2.CreateWorkflowTemplateRequ" + + "est\032/.google.cloud.dataproc.v1beta2.Work" + + "flowTemplate\"\246\001\202\323\344\223\002\214\001\"8/v1beta2/{parent" + + "=projects/*/regions/*}/workflowTemplates" + + ":\010templateZF\":/v1beta2/{parent=projects/" + + "*/locations/*}/workflowTemplates:\010templa" + + "te\332A\020parent, template\022\211\002\n\023GetWorkflowTem" + + "plate\0229.google.cloud.dataproc.v1beta2.Ge" + + "tWorkflowTemplateRequest\032/.google.cloud." + + "dataproc.v1beta2.WorkflowTemplate\"\205\001\202\323\344\223" + + "\002x\0228/v1beta2/{name=projects/*/regions/*/" + + "workflowTemplates/*}Z<\022:/v1beta2/{name=p" + + "rojects/*/locations/*/workflowTemplates/" + + "*}\332A\004name\022\345\002\n\033InstantiateWorkflowTemplat" + + "e\022A.google.cloud.dataproc.v1beta2.Instan" + + "tiateWorkflowTemplateRequest\032\035.google.lo" + + "ngrunning.Operation\"\343\001\202\323\344\223\002\226\001\"D/v1beta2/" + + "{name=projects/*/regions/*/workflowTempl" + + "ates/*}:instantiate:\001*ZK\"F/v1beta2/{name" + + "=projects/*/locations/*/workflowTemplate" + + "s/*}:instantiate:\001*\332A\004name\332A\020name, param" + + "eters\312A)\n\025google.protobuf.Empty\022\020Workflo" + + "wMetadata\022\204\003\n!InstantiateInlineWorkflowT" + + "emplate\022G.google.cloud.dataproc.v1beta2." 
+ + "InstantiateInlineWorkflowTemplateRequest" + + "\032\035.google.longrunning.Operation\"\366\001\202\323\344\223\002\260" + + "\001\"L/v1beta2/{parent=projects/*/locations" + + "/*}/workflowTemplates:instantiateInline:" + + "\010templateZV\"J/v1beta2/{parent=projects/*" + + "/regions/*}/workflowTemplates:instantiat" + + "eInline:\010template\332A\020parent, template\312A)\n" + + "\025google.protobuf.Empty\022\020WorkflowMetadata" + + "\022\272\002\n\026UpdateWorkflowTemplate\022<.google.clo" + + "ud.dataproc.v1beta2.UpdateWorkflowTempla" + + "teRequest\032/.google.cloud.dataproc.v1beta" + + "2.WorkflowTemplate\"\260\001\202\323\344\223\002\236\001\032A/v1beta2/{" + + "template.name=projects/*/regions/*/workf" + + "lowTemplates/*}:\010templateZO\032C/v1beta2/{t" + + "emplate.name=projects/*/locations/*/work" + + "flowTemplates/*}:\010template\332A\010template\022\234\002" + + "\n\025ListWorkflowTemplates\022;.google.cloud.d" + + "ataproc.v1beta2.ListWorkflowTemplatesReq" + + "uest\032<.google.cloud.dataproc.v1beta2.Lis" + + "tWorkflowTemplatesResponse\"\207\001\202\323\344\223\002x\0228/v1" + + "beta2/{parent=projects/*/regions/*}/work" + + "flowTemplatesZ<\022:/v1beta2/{parent=projec" + + "ts/*/locations/*}/workflowTemplates\332A\006pa" + + "rent\022\366\001\n\026DeleteWorkflowTemplate\022<.google" + + ".cloud.dataproc.v1beta2.DeleteWorkflowTe" + + "mplateRequest\032\026.google.protobuf.Empty\"\205\001" + + "\202\323\344\223\002x*8/v1beta2/{name=projects/*/region" + + "s/*/workflowTemplates/*}Z<*:/v1beta2/{na" + "me=projects/*/locations/*/workflowTempla" - + "tes/*}:\010template\332A\010template\022\234\002\n\025ListWork" - + "flowTemplates\022;.google.cloud.dataproc.v1" - + "beta2.ListWorkflowTemplatesRequest\032<.goo" - + "gle.cloud.dataproc.v1beta2.ListWorkflowT" - + "emplatesResponse\"\207\001\202\323\344\223\002x\0228/v1beta2/{par" - + "ent=projects/*/regions/*}/workflowTempla" - + "tesZ<\022:/v1beta2/{parent=projects/*/locat" - + "ions/*}/workflowTemplates\332A\006parent\022\366\001\n\026D" - + "eleteWorkflowTemplate\022<.google.cloud.dat" - + "aproc.v1beta2.DeleteWorkflowTemplateRequ" - + "est\032\026.google.protobuf.Empty\"\205\001\202\323\344\223\002x*8/v" - + "1beta2/{name=projects/*/regions/*/workfl" - + "owTemplates/*}Z<*:/v1beta2/{name=project" - + "s/*/locations/*/workflowTemplates/*}\332A\004n" - + "ame\032K\312A\027dataproc.googleapis.com\322A.https:" - + "//www.googleapis.com/auth/cloud-platform" - + "B\204\001\n!com.google.cloud.dataproc.v1beta2B\026" - + "WorkflowTemplatesProtoP\001ZEgoogle.golang." 
- + "org/genproto/googleapis/cloud/dataproc/v" - + "1beta2;dataprocb\006proto3" + + "tes/*}\332A\004name\032K\312A\027dataproc.googleapis.co" + + "m\322A.https://www.googleapis.com/auth/clou" + + "d-platformB\204\001\n!com.google.cloud.dataproc" + + ".v1beta2B\026WorkflowTemplatesProtoP\001ZEgoog" + + "le.golang.org/genproto/googleapis/cloud/" + + "dataproc/v1beta2;dataprocb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -352,6 +359,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.cloud.dataproc.v1beta2.ClustersProto.getDescriptor(), com.google.cloud.dataproc.v1beta2.JobsProto.getDescriptor(), com.google.longrunning.OperationsProto.getDescriptor(), + com.google.protobuf.DurationProto.getDescriptor(), com.google.protobuf.EmptyProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), }); @@ -370,6 +378,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "Placement", "Jobs", "Parameters", + "DagTimeout", }); internal_static_google_cloud_dataproc_v1beta2_WorkflowTemplate_LabelsEntry_descriptor = internal_static_google_cloud_dataproc_v1beta2_WorkflowTemplate_descriptor @@ -502,6 +511,9 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "StartTime", "EndTime", "ClusterUuid", + "DagTimeout", + "DagStartTime", + "DagEndTime", }); internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_ParametersEntry_descriptor = internal_static_google_cloud_dataproc_v1beta2_WorkflowMetadata_descriptor @@ -630,6 +642,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { com.google.cloud.dataproc.v1beta2.ClustersProto.getDescriptor(); com.google.cloud.dataproc.v1beta2.JobsProto.getDescriptor(); com.google.longrunning.OperationsProto.getDescriptor(); + com.google.protobuf.DurationProto.getDescriptor(); com.google.protobuf.EmptyProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); } diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/autoscaling_policies.proto b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/autoscaling_policies.proto index a7d6376b..7601cca8 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/autoscaling_policies.proto +++ b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/autoscaling_policies.proto @@ -36,10 +36,12 @@ option (google.api.resource_definition) = { // Cloud Dataproc API. service AutoscalingPolicyService { option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; // Creates new autoscaling policy. - rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) + returns (AutoscalingPolicy) { option (google.api.http) = { post: "/v1beta2/{parent=projects/*/locations/*}/autoscalingPolicies" body: "policy" @@ -55,7 +57,8 @@ service AutoscalingPolicyService { // // Disabled check for update_mask, because all updates will be full // replacements. 
- rpc UpdateAutoscalingPolicy(UpdateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + rpc UpdateAutoscalingPolicy(UpdateAutoscalingPolicyRequest) + returns (AutoscalingPolicy) { option (google.api.http) = { put: "/v1beta2/{policy.name=projects/*/locations/*/autoscalingPolicies/*}" body: "policy" @@ -68,7 +71,8 @@ service AutoscalingPolicyService { } // Retrieves autoscaling policy. - rpc GetAutoscalingPolicy(GetAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + rpc GetAutoscalingPolicy(GetAutoscalingPolicyRequest) + returns (AutoscalingPolicy) { option (google.api.http) = { get: "/v1beta2/{name=projects/*/locations/*/autoscalingPolicies/*}" additional_bindings { @@ -79,7 +83,8 @@ service AutoscalingPolicyService { } // Lists autoscaling policies in the project. - rpc ListAutoscalingPolicies(ListAutoscalingPoliciesRequest) returns (ListAutoscalingPoliciesResponse) { + rpc ListAutoscalingPolicies(ListAutoscalingPoliciesRequest) + returns (ListAutoscalingPoliciesResponse) { option (google.api.http) = { get: "/v1beta2/{parent=projects/*/locations/*}/autoscalingPolicies" additional_bindings { @@ -91,7 +96,8 @@ service AutoscalingPolicyService { // Deletes an autoscaling policy. It is an error to delete an autoscaling // policy that is in use by one or more clusters. - rpc DeleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest) returns (google.protobuf.Empty) { + rpc DeleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1beta2/{name=projects/*/locations/*/autoscalingPolicies/*}" additional_bindings { @@ -136,22 +142,26 @@ message AutoscalingPolicy { } // Required. Describes how the autoscaler will operate for primary workers. - InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; + InstanceGroupAutoscalingPolicyConfig worker_config = 4 + [(google.api.field_behavior) = REQUIRED]; // Optional. Describes how the autoscaler will operate for secondary workers. - InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; + InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 + [(google.api.field_behavior) = OPTIONAL]; } // Basic algorithm for autoscaling. message BasicAutoscalingAlgorithm { // Required. YARN autoscaling configuration. - BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; + BasicYarnAutoscalingConfig yarn_config = 1 + [(google.api.field_behavior) = REQUIRED]; // Optional. Duration between scaling events. A scaling period starts after // the update operation from the previous event has completed. // // Bounds: [2m, 1d]. Default: 2m. - google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; + google.protobuf.Duration cooldown_period = 2 + [(google.api.field_behavior) = OPTIONAL]; } // Basic autoscaling configurations for YARN. @@ -162,22 +172,29 @@ message BasicYarnAutoscalingConfig { // downscaling operations. // // Bounds: [0s, 1d]. - google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; - - // Required. Fraction of average pending memory in the last cooldown period - // for which to add workers. A scale-up factor of 1.0 will result in scaling - // up so that there is no pending memory remaining after the update (more - // aggressive scaling). A scale-up factor closer to 0 will result in a smaller - // magnitude of scaling up (less aggressive scaling). 
+ google.protobuf.Duration graceful_decommission_timeout = 5 + [(google.api.field_behavior) = REQUIRED]; + + // Required. Fraction of average YARN pending memory in the last cooldown + // period for which to add workers. A scale-up factor of 1.0 will result in + // scaling up so that there is no pending memory remaining after the update + // (more aggressive scaling). A scale-up factor closer to 0 will result in a + // smaller magnitude of scaling up (less aggressive scaling). See [How + // autoscaling + // works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works) + // for more information. // // Bounds: [0.0, 1.0]. double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. Fraction of average pending memory in the last cooldown period - // for which to remove workers. A scale-down factor of 1 will result in + // Required. Fraction of average YARN pending memory in the last cooldown + // period for which to remove workers. A scale-down factor of 1 will result in // scaling down so that there is no available memory remaining after the // update (more aggressive scaling). A scale-down factor of 0 disables // removing workers, which can be beneficial for autoscaling a single job. + // See [How autoscaling + // works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works) + // for more information. // // Bounds: [0.0, 1.0]. double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; @@ -189,7 +206,8 @@ message BasicYarnAutoscalingConfig { // on any recommended change. // // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; + double scale_up_min_worker_fraction = 3 + [(google.api.field_behavior) = OPTIONAL]; // Optional. Minimum scale-down threshold as a fraction of total cluster size // before scaling occurs. For example, in a 20-worker cluster, a threshold of @@ -198,7 +216,8 @@ message BasicYarnAutoscalingConfig { // on any recommended change. // // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; + double scale_down_min_worker_fraction = 4 + [(google.api.field_behavior) = OPTIONAL]; } // Configuration for the size bounds of an instance group, including its @@ -341,7 +360,8 @@ message ListAutoscalingPoliciesRequest { // A response to a request to list autoscaling policies in a project. message ListAutoscalingPoliciesResponse { // Output only. Autoscaling policies list. - repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + repeated AutoscalingPolicy policies = 1 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. 
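For illustration only (not part of the upstream patch): the scale_up_factor,
scale_down_factor, and cooldown_period comments reformatted above combine into
a policy along these lines. The policy id, factor values, instance bounds, and
timeouts are all hypothetical; a minimal sketch using the generated v1beta2
builders might look like this:

    import com.google.cloud.dataproc.v1beta2.AutoscalingPolicy;
    import com.google.cloud.dataproc.v1beta2.BasicAutoscalingAlgorithm;
    import com.google.cloud.dataproc.v1beta2.BasicYarnAutoscalingConfig;
    import com.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfig;
    import com.google.protobuf.Duration;

    public class AutoscalingPolicySketch {
      public static void main(String[] args) {
        // Scale up for half of pending YARN memory, scale down for all
        // available memory, and wait 2m between scaling events (the
        // documented default cooldown).
        BasicYarnAutoscalingConfig yarnConfig =
            BasicYarnAutoscalingConfig.newBuilder()
                .setGracefulDecommissionTimeout(
                    Duration.newBuilder().setSeconds(3600).build())
                .setScaleUpFactor(0.5)
                .setScaleDownFactor(1.0)
                .build();

        AutoscalingPolicy policy =
            AutoscalingPolicy.newBuilder()
                .setId("sample-policy") // hypothetical policy id
                .setBasicAlgorithm(
                    BasicAutoscalingAlgorithm.newBuilder()
                        .setYarnConfig(yarnConfig)
                        .setCooldownPeriod(Duration.newBuilder().setSeconds(120).build()))
                .setWorkerConfig(
                    InstanceGroupAutoscalingPolicyConfig.newBuilder()
                        .setMinInstances(2)
                        .setMaxInstances(10))
                .build();

        System.out.println(policy);
      }
    }
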
diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/clusters.proto b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/clusters.proto index 93e6fe79..8af436e0 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/clusters.proto +++ b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/clusters.proto @@ -30,17 +30,27 @@ option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta option java_multiple_files = true; option java_outer_classname = "ClustersProto"; option java_package = "com.google.cloud.dataproc.v1beta2"; +option (google.api.resource_definition) = { + type: "container.googleapis.com/Cluster" + pattern: "projects/{project}/locations/{location}/clusters/{cluster}" +}; +option (google.api.resource_definition) = { + type: "metastore.googleapis.com/Service" + pattern: "projects/{project}/locations/{location}/services/{service}" +}; // The ClusterControllerService provides methods to manage clusters // of Compute Engine instances. service ClusterController { option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; // Creates a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) { + rpc CreateCluster(CreateClusterRequest) + returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1beta2/projects/{project_id}/regions/{region}/clusters" body: "cluster" @@ -55,12 +65,14 @@ service ClusterController { // Updates a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). - rpc UpdateCluster(UpdateClusterRequest) returns (google.longrunning.Operation) { + rpc UpdateCluster(UpdateClusterRequest) + returns (google.longrunning.Operation) { option (google.api.http) = { patch: "/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}" body: "cluster" }; - option (google.api.method_signature) = "project_id, region, cluster_name, cluster, update_mask"; + option (google.api.method_signature) = + "project_id, region, cluster_name, cluster, update_mask"; option (google.longrunning.operation_info) = { response_type: "Cluster" metadata_type: "google.cloud.dataproc.v1beta2.ClusterOperationMetadata" @@ -70,7 +82,8 @@ service ClusterController { // Deletes a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). 
- rpc DeleteCluster(DeleteClusterRequest) returns (google.longrunning.Operation) { + rpc DeleteCluster(DeleteClusterRequest) + returns (google.longrunning.Operation) { option (google.api.http) = { delete: "/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}" }; @@ -105,7 +118,8 @@ service ClusterController { // [Operation.response][google.longrunning.Operation.response] // contains // [Empty][google.protobuf.Empty]. - rpc DiagnoseCluster(DiagnoseClusterRequest) returns (google.longrunning.Operation) { + rpc DiagnoseCluster(DiagnoseClusterRequest) + returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose" body: "*" @@ -145,7 +159,8 @@ message Cluster { ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous cluster status. - repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; + repeated ClusterStatus status_history = 7 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A cluster UUID (Unique Universal Identifier). Dataproc // generates this value when it creates the cluster. @@ -171,27 +186,41 @@ message ClusterConfig { // bucket](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; + // Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs + // data, such as Spark and MapReduce history files. If you do not specify a + // temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or + // EU) for your cluster's temp bucket according to the Compute Engine zone + // where your cluster is deployed, and then create and manage this + // project-level, per-location bucket. The default bucket has a TTL of 90 + // days, but you can use any TTL (or none) if you specify a bucket. + string temp_bucket = 2 [(google.api.field_behavior) = OPTIONAL]; + // Optional. The shared Compute Engine config settings for // all instances in a cluster. - GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; + GceClusterConfig gce_cluster_config = 8 + [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // the master instance in a cluster. - InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; + InstanceGroupConfig master_config = 9 + [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // worker instances in a cluster. - InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; + InstanceGroupConfig worker_config = 10 + [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // additional worker instances in a cluster. - InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; + InstanceGroupConfig secondary_worker_config = 12 + [(google.api.field_behavior) = OPTIONAL]; // Optional. The config settings for software inside the cluster. SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; // Optional. The config setting for auto delete cluster schedule. - LifecycleConfig lifecycle_config = 14 [(google.api.field_behavior) = OPTIONAL]; + LifecycleConfig lifecycle_config = 14 + [(google.api.field_behavior) = OPTIONAL]; // Optional. Commands to execute on each node after config is // completed. 
By default, executables are run on master and all worker nodes. @@ -206,14 +235,17 @@ message ClusterConfig { // else // ... worker specific actions ... // fi - repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; + repeated NodeInitializationAction initialization_actions = 11 + [(google.api.field_behavior) = OPTIONAL]; // Optional. Encryption settings for the cluster. - EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; + EncryptionConfig encryption_config = 15 + [(google.api.field_behavior) = OPTIONAL]; // Optional. Autoscaling config for the policy associated with the cluster. // Cluster does not autoscale if this field is unset. - AutoscalingConfig autoscaling_config = 16 [(google.api.field_behavior) = OPTIONAL]; + AutoscalingConfig autoscaling_config = 16 + [(google.api.field_behavior) = OPTIONAL]; // Optional. Port/endpoint configuration for this cluster EndpointConfig endpoint_config = 17 [(google.api.field_behavior) = OPTIONAL]; @@ -221,11 +253,12 @@ message ClusterConfig { // Optional. Security related configuration. SecurityConfig security_config = 18 [(google.api.field_behavior) = OPTIONAL]; - // Optional. The Kubernetes Engine config for Dataproc clusters deployed to Kubernetes. - // Setting this is considered mutually exclusive with Compute Engine-based - // options such as `gce_cluster_config`, `master_config`, `worker_config`, - // `secondary_worker_config`, and `autoscaling_config`. - GkeClusterConfig gke_cluster_config = 19 [(google.api.field_behavior) = OPTIONAL]; + // Optional. The Kubernetes Engine config for Dataproc clusters deployed to + // Kubernetes. Setting this is considered mutually exclusive with Compute + // Engine-based options such as `gce_cluster_config`, `master_config`, + // `worker_config`, `secondary_worker_config`, and `autoscaling_config`. + GkeClusterConfig gke_cluster_config = 19 + [(google.api.field_behavior) = OPTIONAL]; } // The GKE config for this cluster. @@ -246,14 +279,16 @@ message GkeClusterConfig { } // Optional. A target for the deployment. - NamespacedGkeDeploymentTarget namespaced_gke_deployment_target = 1 [(google.api.field_behavior) = OPTIONAL]; + NamespacedGkeDeploymentTarget namespaced_gke_deployment_target = 1 + [(google.api.field_behavior) = OPTIONAL]; } // Endpoint config for this cluster message EndpointConfig { // Output only. The map of port descriptions to URLs. Will only be populated // if enable_http_port_access is true. - map<string, string> http_ports = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + map<string, string> http_ports = 1 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. If true, enable http access to specific ports on the cluster // from external sources. Defaults to false. @@ -330,7 +365,7 @@ message GceClusterConfig { bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional.
The [Dataproc service - // account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc) + // account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc) // (also see [VM Data Plane // identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity)) // used by Dataproc cluster VM instances to access Google Cloud Platform @@ -356,7 +391,8 @@ message GceClusterConfig { // * https://www.googleapis.com/auth/bigtable.admin.table // * https://www.googleapis.com/auth/bigtable.data // * https://www.googleapis.com/auth/devstorage.full_control - repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL]; + repeated string service_account_scopes = 3 + [(google.api.field_behavior) = OPTIONAL]; // The Compute Engine tags to add to all instances (see [Tagging // instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)). @@ -368,19 +404,42 @@ message GceClusterConfig { map<string, string> metadata = 5; // Optional. Reservation Affinity for consuming Zonal reservation. - ReservationAffinity reservation_affinity = 11 [(google.api.field_behavior) = OPTIONAL]; + ReservationAffinity reservation_affinity = 11 + [(google.api.field_behavior) = OPTIONAL]; } // The config settings for Compute Engine resources in // an instance group, such as a master or worker group. message InstanceGroupConfig { + // Controls the use of + // [preemptible instances] + // (https://cloud.google.com/compute/docs/instances/preemptible) + // within the group. + enum Preemptibility { + // Preemptibility is unspecified; the system will choose the + // appropriate setting for each instance group. + PREEMPTIBILITY_UNSPECIFIED = 0; + + // Instances are non-preemptible. + // + // This option is allowed for all instance groups and is the only valid + // value for Master and Worker instance groups. + NON_PREEMPTIBLE = 1; + + // Instances are preemptible. + // + // This option is allowed only for secondary worker groups. + PREEMPTIBLE = 2; + } + // Optional. The number of VM instances in the instance group. // For master instance groups, must be set to 1. int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; // Output only. The list of instance names. Dataproc derives the names // from `cluster_name`, `num_instances`, and the instance group. - repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + repeated string instance_names = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine image resource used for cluster instances. // @@ -424,14 +483,25 @@ message InstanceGroupConfig { // instances. bool is_preemptible = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + // Optional. Specifies the preemptibility of the instance group. + // + // The default value for master and worker groups is + // `NON_PREEMPTIBLE`. This default cannot be changed. + // + // The default value for secondary instances is + // `PREEMPTIBLE`. + Preemptibility preemptibility = 10 [(google.api.field_behavior) = OPTIONAL]; + // Output only. The config for Compute Engine Instance Group // Manager that manages this group. // This is only used for preemptible instance groups. - ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; + ManagedGroupConfig managed_group_config = 7 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional.
The Compute Engine accelerator configuration for these // instances. - repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; + repeated AcceleratorConfig accelerators = 8 + [(google.api.field_behavior) = OPTIONAL]; // Specifies the minimum cpu platform for the Instance Group. // See [Dataproc -> Minimum CPU @@ -446,7 +516,8 @@ message ManagedGroupConfig { string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the Instance Group Manager for this group. - string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + string instance_group_manager_name = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // Specifies the type and number of accelerator cards attached to the instances @@ -500,27 +571,32 @@ message LifecycleConfig { // deleted. Minimum value is 10 minutes; maximum value is 14 days (see JSON // representation of // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json). - google.protobuf.Duration idle_delete_ttl = 1 [(google.api.field_behavior) = OPTIONAL]; + google.protobuf.Duration idle_delete_ttl = 1 + [(google.api.field_behavior) = OPTIONAL]; // Either the exact time the cluster should be deleted at or // the cluster maximum age. oneof ttl { - // Optional. The time when cluster will be auto-deleted. (see JSON representation of + // Optional. The time when cluster will be auto-deleted. (see JSON + // representation of // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Timestamp auto_delete_time = 2 [(google.api.field_behavior) = OPTIONAL]; + google.protobuf.Timestamp auto_delete_time = 2 + [(google.api.field_behavior) = OPTIONAL]; // Optional. The lifetime duration of cluster. The cluster will be // auto-deleted at the end of this period. Minimum value is 10 minutes; // maximum value is 14 days (see JSON representation of // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Duration auto_delete_ttl = 3 [(google.api.field_behavior) = OPTIONAL]; + google.protobuf.Duration auto_delete_ttl = 3 + [(google.api.field_behavior) = OPTIONAL]; } // Output only. The time when cluster became idle (most recent job finished) // and became eligible for deletion due to idleness (see JSON representation // of // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Timestamp idle_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + google.protobuf.Timestamp idle_start_time = 4 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // Security related configuration, including encryption, Kerberos, etc. @@ -531,13 +607,14 @@ message SecurityConfig { // Specifies Kerberos related configuration. message KerberosConfig { - // Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set - // this field to true to enable Kerberos on a cluster. + // Optional. Flag to indicate whether to Kerberize the cluster (default: + // false). Set this field to true to enable Kerberos on a cluster. bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. The Cloud Storage URI of a KMS encrypted file containing the root // principal password. - string root_principal_password_uri = 2 [(google.api.field_behavior) = REQUIRED]; + string root_principal_password_uri = 2 + [(google.api.field_behavior) = REQUIRED]; // Required. The uri of the KMS key used to encrypt various sensitive // files. 
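The temp_bucket and preemptibility fields added in the hunks above surface in the regenerated v1beta2 Java bindings as ordinary builder setters. A minimal sketch, assuming the regenerated classes from this change (the bucket name, instance counts, and class name are illustrative):

import com.google.cloud.dataproc.v1beta2.ClusterConfig;
import com.google.cloud.dataproc.v1beta2.InstanceGroupConfig;
import com.google.cloud.dataproc.v1beta2.InstanceGroupConfig.Preemptibility;

public class PreemptibilitySketch {
  public static void main(String[] args) {
    ClusterConfig config =
        ClusterConfig.newBuilder()
            // New in this change: pin the ephemeral-data bucket instead of
            // letting Dataproc create a managed, 90-day-TTL temp bucket.
            .setTempBucket("example-temp-bucket") // illustrative name
            .setMasterConfig(
                InstanceGroupConfig.newBuilder()
                    .setNumInstances(1)
                    // NON_PREEMPTIBLE is the only valid value for master and
                    // worker groups, per the doc comment above.
                    .setPreemptibility(Preemptibility.NON_PREEMPTIBLE))
            .setSecondaryWorkerConfig(
                InstanceGroupConfig.newBuilder()
                    .setNumInstances(4)
                    // Secondary workers default to PREEMPTIBLE; set it
                    // explicitly here for clarity.
                    .setPreemptibility(Preemptibility.PREEMPTIBLE))
            .build();
    System.out.println(config);
  }
}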
@@ -578,12 +655,14 @@ message KerberosConfig { // Optional. The admin server (IP or hostname) for the remote trusted realm in // a cross realm trust relationship. - string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; + string cross_realm_trust_admin_server = 11 + [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // shared password between the on-cluster Kerberos realm and the remote // trusted realm, in a cross realm trust relationship. - string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; + string cross_realm_trust_shared_password_uri = 12 + [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // master key of the KDC database. @@ -612,7 +691,8 @@ message NodeInitializationAction { // Cluster creation fails with an explanatory error message (the // name of the executable that caused the error and the exceeded timeout // period) if the executable is not completed at end of the timeout period. - google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; + google.protobuf.Duration execution_timeout = 2 + [(google.api.field_behavior) = OPTIONAL]; } // The status of a cluster and its instances. @@ -674,7 +754,8 @@ message ClusterStatus { // Output only. Time when this state was entered (see JSON representation of // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + google.protobuf.Timestamp state_start_time = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information that includes // status reported by the agent. @@ -685,7 +766,7 @@ message ClusterStatus { message SoftwareConfig { // Optional. The version of software inside the cluster. It must be one of the // supported [Dataproc - // Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions), + // Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions), // such as "1.2" (including a subminor version, such as "1.2.29"), or the // ["preview" // version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). @@ -741,10 +822,11 @@ message CreateClusterRequest { Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server - // receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend - // is returned. + // receives two + // [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. // // It is recommended to always set this value to a // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). @@ -778,7 +860,8 @@ message UpdateClusterRequest { // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). // // Only supported on Dataproc image versions 1.2 and higher. 
- google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; + google.protobuf.Duration graceful_decommission_timeout = 6 + [(google.api.field_behavior) = OPTIONAL]; // Required. Specifies the path, relative to `Cluster`, of // the field to update. For example, to change the number of workers @@ -840,13 +923,15 @@ message UpdateClusterRequest { // autoscaling policies // // - google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; + google.protobuf.FieldMask update_mask = 4 + [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server - // receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. + // receives two + // [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. // // It is recommended to always set this value to a // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). @@ -873,10 +958,11 @@ message DeleteClusterRequest { string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A unique id used to identify the request. If the server - // receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests with the same - // id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. + // receives two + // [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. // // It is recommended to always set this value to a // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/jobs.proto b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/jobs.proto index 9d9aaae0..c99f6791 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/jobs.proto +++ b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/jobs.proto @@ -224,12 +224,12 @@ message SparkJob { // Spark driver and tasks. repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - // Optional. HCFS URIs of files to be copied to the working directory of - // Spark drivers and distributed tasks. Useful for naively parallel tasks. + // Optional. HCFS URIs of files to be placed in the working directory of + // each executor. Useful for naively parallel tasks. repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - // Optional. HCFS URIs of archives to be extracted in the working directory - // of Spark drivers and tasks. Supported file types: + // Optional. HCFS URIs of archives to be extracted into the working directory + // of each executor. Supported file types: // .jar, .tar, .tar.gz, .tgz, and .zip. 
repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; @@ -265,11 +265,12 @@ message PySparkJob { // Python driver and tasks. repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - // Optional. HCFS URIs of files to be copied to the working directory of - // Python drivers and distributed tasks. Useful for naively parallel tasks. + // Optional. HCFS URIs of files to be placed in the working directory of + // each executor. Useful for naively parallel tasks. repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - // Optional. HCFS URIs of archives to be extracted in the working directory of + // Optional. HCFS URIs of archives to be extracted into the working directory + // of each executor. Supported file types: // .jar, .tar, .tar.gz, .tgz, and .zip. repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; @@ -414,12 +415,12 @@ message SparkRJob { // occur that causes an incorrect job submission. repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; - // Optional. HCFS URIs of files to be copied to the working directory of - // R drivers and distributed tasks. Useful for naively parallel tasks. + // Optional. HCFS URIs of files to be placed in the working directory of + // each executor. Useful for naively parallel tasks. repeated string file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; - // Optional. HCFS URIs of archives to be extracted in the working directory of - // Spark drivers and tasks. Supported file types: + // Optional. HCFS URIs of archives to be extracted into the working directory + // of each executor. Supported file types: // .jar, .tar, .tar.gz, .tgz, and .zip. repeated string archive_uris = 4 [(google.api.field_behavior) = OPTIONAL]; @@ -562,9 +563,9 @@ message JobStatus { // Encapsulates the full scoping used to reference a job. message JobReference { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + // Optional. The ID of the Google Cloud Platform project that the job + // belongs to. If specified, must match the request project ID. + string project_id = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The job ID, which must be unique within the project. // The ID must contain only letters (a-z, A-Z), numbers (0-9), diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/shared.proto b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/shared.proto index 130ae554..ac474aa5 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/shared.proto +++ b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/shared.proto @@ -25,20 +25,17 @@ option java_package = "com.google.cloud.dataproc.v1beta2"; // Cluster components that can be activated. enum Component { - // Unspecified component. + // Unspecified component. Specifying this will cause cluster creation to fail. COMPONENT_UNSPECIFIED = 0; // The Anaconda python distribution. ANACONDA = 5; - // Docker - DOCKER = 13; // The Druid query engine. DRUID = 9; - // Flink - FLINK = 14; + // HBase. + HBASE = 11; // The Hive Web HCatalog (the REST service for accessing HCatalog).
HIVE_WEBHCAT = 3; diff --git a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/workflow_templates.proto b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/workflow_templates.proto index e5ef680b..48f2f719 100644 --- a/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/workflow_templates.proto +++ b/proto-google-cloud-dataproc-v1beta2/src/main/proto/google/cloud/dataproc/v1beta2/workflow_templates.proto @@ -23,6 +23,7 @@ import "google/api/resource.proto"; import "google/cloud/dataproc/v1beta2/clusters.proto"; import "google/cloud/dataproc/v1beta2/jobs.proto"; import "google/longrunning/operations.proto"; +import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; @@ -35,10 +36,12 @@ option java_package = "com.google.cloud.dataproc.v1beta2"; // Dataproc API. service WorkflowTemplateService { option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; // Creates new workflow template. - rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { + rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) + returns (WorkflowTemplate) { option (google.api.http) = { post: "/v1beta2/{parent=projects/*/regions/*}/workflowTemplates" body: "template" @@ -54,7 +57,8 @@ service WorkflowTemplateService { // // Can retrieve previously instantiated template by specifying optional // version parameter. - rpc GetWorkflowTemplate(GetWorkflowTemplateRequest) returns (WorkflowTemplate) { + rpc GetWorkflowTemplate(GetWorkflowTemplateRequest) + returns (WorkflowTemplate) { option (google.api.http) = { get: "/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}" additional_bindings { @@ -84,7 +88,8 @@ service WorkflowTemplateService { // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be // [Empty][google.protobuf.Empty]. - rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest) returns (google.longrunning.Operation) { + rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest) + returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate" body: "*" @@ -104,7 +109,8 @@ service WorkflowTemplateService { // Instantiates a template and begins execution. // // This method is equivalent to executing the sequence - // [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], + // [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], + // [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], // [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. // // The returned Operation can be used to track execution of @@ -125,7 +131,9 @@ service WorkflowTemplateService { // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be // [Empty][google.protobuf.Empty]. 
- rpc InstantiateInlineWorkflowTemplate(InstantiateInlineWorkflowTemplateRequest) returns (google.longrunning.Operation) { + rpc InstantiateInlineWorkflowTemplate( + InstantiateInlineWorkflowTemplateRequest) + returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" body: "template" @@ -143,7 +151,8 @@ service WorkflowTemplateService { // Updates (replaces) workflow template. The updated template // must contain version that matches the current server version. - rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest) returns (WorkflowTemplate) { + rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest) + returns (WorkflowTemplate) { option (google.api.http) = { put: "/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}" body: "template" @@ -156,7 +165,8 @@ service WorkflowTemplateService { } // Lists workflows that match the specified filter in the request. - rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest) returns (ListWorkflowTemplatesResponse) { + rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest) + returns (ListWorkflowTemplatesResponse) { option (google.api.http) = { get: "/v1beta2/{parent=projects/*/regions/*}/workflowTemplates" additional_bindings { @@ -167,7 +177,8 @@ service WorkflowTemplateService { } // Deletes a workflow template. It does not cancel in-progress workflows. - rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest) returns (google.protobuf.Empty) { + rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}" additional_bindings { @@ -220,10 +231,12 @@ message WorkflowTemplate { int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; // Output only. The time template was created. - google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + google.protobuf.Timestamp create_time = 4 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time template was last updated. - google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + google.protobuf.Timestamp update_time = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The labels to associate with this template. These labels // will be propagated to all jobs and clusters created by the workflow @@ -248,7 +261,20 @@ message WorkflowTemplate { // Optional. Template parameters whose values are substituted into the // template. Values for parameters must be provided when the template is // instantiated. - repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; + repeated TemplateParameter parameters = 9 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", + // and "d" suffixes for second, minute, hour, and day duration values, + // respectively. The timeout duration must be from 10 minutes ("10m") to 24 + // hours ("24h" or "1d"). The timer begins when the first job is submitted. If + // the workflow is running at the end of the timeout period, any remaining + // jobs are cancelled, the workflow is terminated, and if the workflow was + // running on a [managed + // cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), + // the cluster is deleted. 
+ google.protobuf.Duration dag_timeout = 10 + [(google.api.field_behavior) = OPTIONAL]; } // Specifies workflow execution target. @@ -316,8 +342,8 @@ message OrderedJob { // // The step id is used as prefix for job id, as job // `goog-dataproc-workflow-step-id` label, and in - // [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] field from other - // steps. + // [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + // field from other steps. // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). Cannot begin or end with underscore @@ -326,23 +352,29 @@ message OrderedJob { // Required. The job definition. oneof job_type { - HadoopJob hadoop_job = 2; + // Optional. Job is a Hadoop job. + HadoopJob hadoop_job = 2 [(google.api.field_behavior) = OPTIONAL]; - SparkJob spark_job = 3; + // Optional. Job is a Spark job. + SparkJob spark_job = 3 [(google.api.field_behavior) = OPTIONAL]; - PySparkJob pyspark_job = 4; + // Optional. Job is a PySpark job. + PySparkJob pyspark_job = 4 [(google.api.field_behavior) = OPTIONAL]; - HiveJob hive_job = 5; + // Optional. Job is a Hive job. + HiveJob hive_job = 5 [(google.api.field_behavior) = OPTIONAL]; - PigJob pig_job = 6; + // Optional. Job is a Pig job. + PigJob pig_job = 6 [(google.api.field_behavior) = OPTIONAL]; - // Spark R job - SparkRJob spark_r_job = 11; + // Optional. Job is a SparkR job. + SparkRJob spark_r_job = 11 [(google.api.field_behavior) = OPTIONAL]; - SparkSqlJob spark_sql_job = 7; + // Optional. Job is a SparkSql job. + SparkSqlJob spark_sql_job = 7 [(google.api.field_behavior) = OPTIONAL]; - // Presto job - PrestoJob presto_job = 12; + // Optional. Job is a Presto job. + PrestoJob presto_job = 12 [(google.api.field_behavior) = OPTIONAL]; } // Optional. The labels to associate with this job. @@ -362,7 +394,8 @@ message OrderedJob { // Optional. The optional list of prerequisite job step_ids. // If not specified, the job will start at the beginning of workflow. - repeated string prerequisite_step_ids = 10 [(google.api.field_behavior) = OPTIONAL]; + repeated string prerequisite_step_ids = 10 + [(google.api.field_behavior) = OPTIONAL]; } // A configurable parameter that replaces one or more fields in the template. @@ -388,10 +421,10 @@ message TemplateParameter { // A field is allowed to appear in at most one parameter's list of field // paths. // - // A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - // For example, a field path that references the zone field of a workflow - // template's cluster selector would be specified as - // `placement.clusterSelector.zone`. + // A field path is similar in syntax to a + // [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + // field path that references the zone field of a workflow template's cluster + // selector would be specified as `placement.clusterSelector.zone`. // // Also, field paths can reference fields using the following syntax: // @@ -498,13 +531,15 @@ message WorkflowMetadata { int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The create cluster operation metadata. - ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + ClusterOperation create_cluster = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow graph. WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. 
The delete cluster operation metadata. - ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + ClusterOperation delete_cluster = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow state. State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -516,13 +551,35 @@ map<string, string> parameters = 8; // Output only. Workflow start time. - google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; + google.protobuf.Timestamp start_time = 9 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Workflow end time. - google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; + google.protobuf.Timestamp end_time = 10 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The UUID of target cluster. string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The timeout duration for the DAG of jobs. + // Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed + // as a + // [google.protobuf.Duration](https://developers.google.com/protocol-buffers/docs/proto3#json_mapping). + // For example, "1800s" = 1800 seconds (30 minutes). + google.protobuf.Duration dag_timeout = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. DAG start time, which is only set for workflows with + // [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout] + // when the DAG begins. + google.protobuf.Timestamp dag_start_time = 13 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. DAG end time, which is only set for workflows with + // [dag_timeout][google.cloud.dataproc.v1beta2.WorkflowMetadata.dag_timeout] + // when the DAG ends. + google.protobuf.Timestamp dag_end_time = 14 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // The cluster operation triggered by a workflow. @@ -571,7 +628,8 @@ message WorkflowNode { string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Node's prerequisite nodes. - repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + repeated string prerequisite_step_ids = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The job id; populated after the node enters RUNNING state. string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -753,7 +811,8 @@ message ListWorkflowTemplatesRequest { // A response to a request to list workflow templates in a project. message ListWorkflowTemplatesResponse { // Output only. WorkflowTemplates list. - repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + repeated WorkflowTemplate templates = 1 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch.
To fetch additional results, provide this value as the diff --git a/synth.metadata b/synth.metadata index b1183f80..c71a146e 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,16 +11,16 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "6fd07563a2f1a6785066f5955ad9659a315e4492", - "internalRef": "324941614" + "sha": "6d65640b1fcbdf26ea76cb720de0ac138cae9bed", + "internalRef": "347036369" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "6fd07563a2f1a6785066f5955ad9659a315e4492", - "internalRef": "324941614" + "sha": "6d65640b1fcbdf26ea76cb720de0ac138cae9bed", + "internalRef": "347036369" } }, { @@ -170,7 +170,6 @@ "google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1beta2/stub/WorkflowTemplateServiceStubSettings.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/AutoscalingPolicyServiceClientTest.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerClientTest.java", - "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/ClusterControllerSmokeTest.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/JobControllerClientTest.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyServiceImpl.java", @@ -183,7 +182,6 @@ "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/WorkflowTemplateServiceClientTest.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/AutoscalingPolicyServiceClientTest.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerClientTest.java", - "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/ClusterControllerSmokeTest.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/JobControllerClientTest.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyService.java", "google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1beta2/MockAutoscalingPolicyServiceImpl.java", @@ -406,6 +404,7 @@ "proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterConfigOrBuilder.java", "proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterMetrics.java", "proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterMetricsOrBuilder.java", + "proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterName.java", "proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterOperation.java", "proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterOperationMetadata.java", "proto-google-cloud-dataproc-v1beta2/src/main/java/com/google/cloud/dataproc/v1beta2/ClusterOperationMetadataOrBuilder.java",