From e76db900ad4ba4de8185608c5b82470aa601e480 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 11 Dec 2020 16:17:02 -0800 Subject: [PATCH 1/6] chore: migrate java-bigquerystorage to the Java microgenerator Committer: @miraleung PiperOrigin-RevId: 345311069 Source-Author: Google APIs Source-Date: Wed Dec 2 14:17:15 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: e39e42f368d236203a774ee994fcb4d730c33a83 Source-Link: https://github.com/googleapis/googleapis/commit/e39e42f368d236203a774ee994fcb4d730c33a83 --- .../storage/v1/BaseBigQueryReadClient.java | 167 ++----- .../storage/v1/BaseBigQueryReadSettings.java | 33 +- .../bigquery/storage/v1/package-info.java | 23 +- .../storage/v1/stub/BigQueryReadStub.java | 9 +- .../v1/stub/BigQueryReadStubSettings.java | 99 ++-- .../stub/GrpcBigQueryReadCallableFactory.java | 40 +- .../storage/v1/stub/GrpcBigQueryReadStub.java | 37 +- .../storage/v1alpha2/BigQueryWriteClient.java | 444 ++++-------------- .../v1alpha2/BigQueryWriteSettings.java | 79 ++-- .../storage/v1alpha2/package-info.java | 20 +- .../v1alpha2/stub/BigQueryWriteStub.java | 37 +- .../stub/BigQueryWriteStubSettings.java | 189 ++++---- .../GrpcBigQueryWriteCallableFactory.java | 40 +- .../v1alpha2/stub/GrpcBigQueryWriteStub.java | 264 ++++++----- .../v1beta1/BaseBigQueryStorageClient.java | 319 +++---------- .../v1beta1/BaseBigQueryStorageSettings.java | 71 +-- .../storage/v1beta1/package-info.java | 23 +- .../v1beta1/stub/BigQueryStorageStub.java | 34 +- .../stub/BigQueryStorageStubSettings.java | 184 ++++---- .../GrpcBigQueryStorageCallableFactory.java | 40 +- .../v1beta1/stub/GrpcBigQueryStorageStub.java | 194 ++++---- .../v1beta2/BaseBigQueryReadClient.java | 167 ++----- .../v1beta2/BaseBigQueryReadSettings.java | 33 +- .../storage/v1beta2/BigQueryWriteClient.java | 329 ++----------- .../v1beta2/BigQueryWriteSettings.java | 36 +- .../storage/v1beta2/package-info.java | 35 +- .../v1beta2/stub/BigQueryReadStub.java | 9 +- 
.../stub/BigQueryReadStubSettings.java | 148 ++---- .../v1beta2/stub/BigQueryWriteStub.java | 9 +- .../stub/BigQueryWriteStubSettings.java | 159 ++----- .../stub/GrpcBigQueryReadCallableFactory.java | 40 +- .../v1beta2/stub/GrpcBigQueryReadStub.java | 37 +- .../GrpcBigQueryWriteCallableFactory.java | 40 +- .../v1beta2/stub/GrpcBigQueryWriteStub.java | 49 +- .../v1/BaseBigQueryReadClientTest.java | 152 ++++-- .../bigquery/storage/v1/MockBigQueryRead.java | 6 +- .../storage/v1/MockBigQueryReadImpl.java | 18 +- .../v1alpha2/BigQueryWriteClientTest.java | 360 ++++++++++---- .../storage/v1alpha2/MockBigQueryWrite.java | 6 +- .../v1alpha2/MockBigQueryWriteImpl.java | 79 ++-- .../BaseBigQueryStorageClientTest.java | 189 ++++---- .../storage/v1beta1/MockBigQueryStorage.java | 6 +- .../v1beta1/MockBigQueryStorageImpl.java | 58 ++- .../v1beta2/BaseBigQueryReadClientTest.java | 157 +++++-- .../v1beta2/BigQueryWriteClientTest.java | 301 +++++++++--- .../storage/v1beta2/MockBigQueryRead.java | 6 +- .../storage/v1beta2/MockBigQueryReadImpl.java | 18 +- .../storage/v1beta2/MockBigQueryWrite.java | 6 +- .../v1beta2/MockBigQueryWriteImpl.java | 30 +- .../bigquery/storage/v1/ProjectName.java | 92 ++-- .../bigquery/storage/v1/ReadSessionName.java | 210 --------- .../bigquery/storage/v1/ReadStreamName.java | 131 +++--- .../cloud/bigquery/storage/v1/TableName.java | 204 -------- .../bigquery/storage/v1alpha2/TableName.java | 119 ++--- .../storage/v1alpha2/WriteStreamName.java | 131 +++--- .../bigquery/storage/v1beta1/ProjectName.java | 92 ++-- .../storage/v1beta1/ReadSessionName.java | 210 --------- .../bigquery/storage/v1beta1/StreamName.java | 210 --------- .../bigquery/storage/v1beta2/ProjectName.java | 92 ++-- .../storage/v1beta2/ReadSessionName.java | 210 --------- .../storage/v1beta2/ReadStreamName.java | 131 +++--- .../bigquery/storage/v1beta2/TableName.java | 119 ++--- .../storage/v1beta2/WriteStreamName.java | 131 +++--- synth.metadata | 23 +- 64 files changed, 2722 
insertions(+), 4212 deletions(-) delete mode 100644 proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java delete mode 100644 proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java delete mode 100644 proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java delete mode 100644 proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java delete mode 100644 proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java index 3c36401335..de3f88a6d8 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -22,10 +23,11 @@ import com.google.cloud.bigquery.storage.v1.stub.BigQueryReadStub; import com.google.cloud.bigquery.storage.v1.stub.BigQueryReadStubSettings; import java.io.IOException; +import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery Read API. * @@ -34,18 +36,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   ReadSession readSession = ReadSession.newBuilder().build();
- *   int maxStreamCount = 0;
- *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the baseBigQueryReadClient object to clean up resources + *

Note: close() needs to be called on the BaseBigQueryReadClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -74,30 +65,28 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * 
- * 
+ * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * 
- * 
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BaseBigQueryReadClient implements BackgroundResource { private final BaseBigQueryReadSettings settings; private final BigQueryReadStub stub; @@ -118,7 +107,7 @@ public static final BaseBigQueryReadClient create(BaseBigQueryReadSettings setti /** * Constructs an instance of BaseBigQueryReadClient, using the given stub for making calls. This - * is for advanced usage - prefer to use BaseBigQueryReadSettings}. + * is for advanced usage - prefer using create(BaseBigQueryReadSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryReadClient create(BigQueryReadStub stub) { @@ -150,7 +139,7 @@ public BigQueryReadStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -169,25 +158,14 @@ public BigQueryReadStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   int maxStreamCount = 0;
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
-   * }
-   * 
- * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. - * @param readSession Required. Session to be created. - * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide - * a value of streams so as to produce reasonable throughput. Must be non-negative. The number - * of streams may be lower than the requested number, depending on the amount parallelism that - * is reasonable for the table. Error will be returned if the max count is greater than the - * current system max limit of 1,000. + * @param read_session Required. Session to be created. + * @param max_stream_count Max initial number of streams. If unset or zero, the server will + * provide a value of streams so as to produce reasonable throughput. Must be non-negative. + * The number of streams may be lower than the requested number, depending on the amount + * parallelism that is reasonable for the table. Error will be returned if the max count is + * greater than the current system max limit of 1,000. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -195,14 +173,14 @@ public final ReadSession createReadSession( ProjectName parent, ReadSession readSession, int maxStreamCount) { CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) + .setParent(Objects.isNull(parent) ? null : parent.toString()) .setReadSession(readSession) .setMaxStreamCount(maxStreamCount) .build(); return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -221,25 +199,14 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   int maxStreamCount = 0;
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent.toString(), readSession, maxStreamCount);
-   * }
-   * 
- * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. - * @param readSession Required. Session to be created. - * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide - * a value of streams so as to produce reasonable throughput. Must be non-negative. The number - * of streams may be lower than the requested number, depending on the amount parallelism that - * is reasonable for the table. Error will be returned if the max count is greater than the - * current system max limit of 1,000. + * @param read_session Required. Session to be created. + * @param max_stream_count Max initial number of streams. If unset or zero, the server will + * provide a value of streams so as to produce reasonable throughput. Must be non-negative. + * The number of streams may be lower than the requested number, depending on the amount + * parallelism that is reasonable for the table. Error will be returned if the max count is + * greater than the current system max limit of 1,000. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -254,7 +221,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -273,20 +240,6 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setReadSession(readSession)
-   *     .build();
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -294,7 +247,7 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -314,26 +267,12 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setReadSession(readSession)
-   *     .build();
-   *   ApiFuture<ReadSession> future = baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
-   *   // Do something
-   *   ReadSession response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Reads rows from the stream in the format prescribed by the ReadSession. Each response contains * one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to @@ -343,26 +282,12 @@ public final UnaryCallable createReadSess * stream. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName readStream = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
-   *     .setReadStream(readStream.toString())
-   *     .build();
-   *
-   *   ServerStream<ReadRowsResponse> stream = baseBigQueryReadClient.readRowsCallable().call(request);
-   *   for (ReadRowsResponse response : stream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -375,18 +300,6 @@ public final ServerStreamingCallable readRows * original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read * to completion. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -394,7 +307,7 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -408,18 +321,6 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ * to completion. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
-   *   // Do something
-   *   SplitReadStreamResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable splitReadStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java index 7dfff71a93..d9669ef04a 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1; import com.google.api.core.ApiFunction; @@ -31,7 +32,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BaseBigQueryReadClient}. * @@ -49,23 +50,24 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BaseBigQueryReadSettings.newBuilder();
  * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         baseBigQueryReadSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BaseBigQueryReadSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BaseBigQueryReadSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createReadSession. */ public UnaryCallSettings createReadSessionSettings() { return ((BigQueryReadStubSettings) getStubSettings()).createReadSessionSettings(); @@ -142,18 +144,15 @@ protected BaseBigQueryReadSettings(Builder settingsBuilder) throws IOException { /** Builder for BaseBigQueryReadSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryReadStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryReadStubSettings.newBuilder()); - } - protected Builder(BaseBigQueryReadSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -162,11 +161,15 @@ protected Builder(BigQueryReadStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryReadStubSettings.newBuilder()); + } + public BigQueryReadStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryReadStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java index a29e6a13d4..b6a07a3c4a 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,17 @@ */ /** - * A client to BigQuery Storage API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

====================== BaseBigQueryReadClient ====================== + *

======================= BigQueryReadClient ======================= * *

Service Description: BigQuery Read API. * *

The Read API can be used to read data from BigQuery. * - *

Sample for BaseBigQueryReadClient: - * - *

- * 
- * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   ReadSession readSession = ReadSession.newBuilder().build();
- *   int maxStreamCount = 0;
- *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
- * }
- * 
- * 
+ *

Sample for BigQueryReadClient: */ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.bigquery.storage.v1; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java index 01bff92268..85cb247aaf 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -27,14 +27,13 @@ import com.google.cloud.bigquery.storage.v1.SplitReadStreamResponse; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. + * Base stub class for the BigQueryRead service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryReadStub implements BackgroundResource { public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java index 1b657327c5..643f8c3d21 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1.stub; import com.google.api.core.ApiFunction; @@ -46,7 +47,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryReadStub}. * @@ -64,22 +65,23 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
- * BigQueryReadStubSettings.Builder baseBigQueryReadSettingsBuilder =
+ * 
{@code
+ * BigQueryReadStubSettings.Builder bigQueryReadSettingsBuilder =
  *     BigQueryReadStubSettings.newBuilder();
- * baseBigQueryReadSettingsBuilder
+ * bigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         bigQueryReadSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryReadStubSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * 
- * 
+ * BigQueryReadStubSettings bigQueryReadSettings = bigQueryReadSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryReadStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -116,10 +118,10 @@ public BigQueryReadStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryReadStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -187,14 +189,12 @@ protected BigQueryReadStubSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryReadStubSettings. */ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createReadSessionSettings; private final ServerStreamingCallSettings.Builder readRowsSettings; private final UnaryCallSettings.Builder splitReadStreamSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -202,19 +202,18 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", + "retry_policy_0_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_3_codes", + "retry_policy_1_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, 
StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -233,7 +232,7 @@ public static class Builder extends StubSettings.Builder>of( createReadSessionSettings, splitReadStreamSettings); - initDefaults(this); } + protected Builder(BigQueryReadStubSettings settings) { + super(settings); + + createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); + readRowsSettings = settings.readRowsSettings.toBuilder(); + splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createReadSessionSettings, splitReadStreamSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .readRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .splitReadStreamSettings() - 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); return builder; } - protected Builder(BigQueryReadStubSettings settings) { - super(settings); - - createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); - readRowsSettings = settings.readRowsSettings.toBuilder(); - splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createReadSessionSettings, splitReadStreamSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java index 886b58e704..3da5e2a734 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryRead service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryReadCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java index edb90c4e5a..6e887492d7 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -31,6 +31,7 @@ import com.google.cloud.bigquery.storage.v1.SplitReadStreamRequest; import com.google.cloud.bigquery.storage.v1.SplitReadStreamResponse; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -38,16 +39,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryRead service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryReadStub extends BigQueryReadStub { - private static final MethodDescriptor createReadSessionMethodDescriptor = MethodDescriptor.newBuilder() @@ -57,6 +56,7 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) .build(); + private static final MethodDescriptor readRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -65,6 +65,7 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor splitReadStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -76,13 +77,13 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final UnaryCallable createReadSessionCallable; private final ServerStreamingCallable readRowsCallable; private final UnaryCallable splitReadStreamCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryReadStub create(BigQueryReadStubSettings settings) @@ -100,27 +101,18 @@ public static final GrpcBigQueryReadStub create( BigQueryReadStubSettings.newBuilder().build(), clientContext, callableFactory); } - /** - * Constructs an instance of GrpcBigQueryReadStub, using the given settings. 
This is protected so - * that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. - */ protected GrpcBigQueryReadStub(BigQueryReadStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryReadCallableFactory()); } - /** - * Constructs an instance of GrpcBigQueryReadStub, using the given settings. This is protected so - * that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. - */ protected GrpcBigQueryReadStub( BigQueryReadStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createReadSessionTransportSettings = GrpcCallSettings.newBuilder() @@ -176,7 +168,12 @@ public Map extract(SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java index edeb6e2800..5f23a07a74 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java @@ -5,7 +5,7 @@ * you may not use this file except in 
compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,30 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStub; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStubSettings; import java.io.IOException; +import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
/** * Service Description: BigQuery Write API. * @@ -45,17 +36,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- *   WriteStream writeStream = WriteStream.newBuilder().build();
- *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the bigQueryWriteClient object to clean up resources such + *

Note: close() needs to be called on the BigQueryWriteClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * *

The surface of this class includes several types of Java methods for each of the API's @@ -83,30 +64,26 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
- * BigQueryWriteClient bigQueryWriteClient =
- *     BigQueryWriteClient.create(bigQueryWriteSettings);
- * 
- * 
+ * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings); + * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder().setEndpoint(myEndpoint).build();
- * BigQueryWriteClient bigQueryWriteClient =
- *     BigQueryWriteClient.create(bigQueryWriteSettings);
- * 
- * 
+ * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings); + * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BigQueryWriteClient implements BackgroundResource { private final BigQueryWriteSettings settings; private final BigQueryWriteStub stub; @@ -127,7 +104,7 @@ public static final BigQueryWriteClient create(BigQueryWriteSettings settings) /** * Constructs an instance of BigQueryWriteClient, using the given stub for making calls. This is - * for advanced usage - prefer to use BigQueryWriteSettings}. + * for advanced usage - prefer using create(BigQueryWriteSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BigQueryWriteClient create(BigQueryWriteStub stub) { @@ -159,109 +136,66 @@ public BigQueryWriteStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
-   * }
-   * 
- * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param writeStream Required. Stream to be created. + * @param write_stream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream createWriteStream(TableName parent, WriteStream writeStream) { - CreateWriteStreamRequest request = - CreateWriteStreamRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) + public final Stream.WriteStream createWriteStream( + TableName parent, Stream.WriteStream writeStream) { + Storage.CreateWriteStreamRequest request = + Storage.CreateWriteStreamRequest.newBuilder() + .setParent(Objects.isNull(parent) ? null : parent.toString()) .setWriteStream(writeStream) .build(); return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent.toString(), writeStream);
-   * }
-   * 
- * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param writeStream Required. Stream to be created. + * @param write_stream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream createWriteStream(String parent, WriteStream writeStream) { - CreateWriteStreamRequest request = - CreateWriteStreamRequest.newBuilder().setParent(parent).setWriteStream(writeStream).build(); + public final Stream.WriteStream createWriteStream(String parent, Stream.WriteStream writeStream) { + Storage.CreateWriteStreamRequest request = + Storage.CreateWriteStreamRequest.newBuilder() + .setParent(parent) + .setWriteStream(writeStream) + .build(); return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setWriteStream(writeStream)
-   *     .build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream createWriteStream(CreateWriteStreamRequest request) { + public final Stream.WriteStream createWriteStream(Storage.CreateWriteStreamRequest request) { return createWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setWriteStream(writeStream)
-   *     .build();
-   *   ApiFuture<WriteStream> future = bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   WriteStream response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable createWriteStreamCallable() { + public final UnaryCallable + createWriteStreamCallable() { return stub.createWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Appends data to the given stream. * @@ -281,396 +215,229 @@ public final UnaryCallable createWriteStr * the stream is committed. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
-   *       bigQueryWriteClient.appendRowsCallable().call();
-   *
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   AppendRowsRequest request = AppendRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   bidiStream.send(request);
-   *   for (AppendRowsResponse response : bidiStream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ - public final BidiStreamingCallable appendRowsCallable() { + public final BidiStreamingCallable + appendRowsCallable() { return stub.appendRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(name);
-   * }
-   * 
- * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream getWriteStream(WriteStreamName name) { - GetWriteStreamRequest request = - GetWriteStreamRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + public final Stream.WriteStream getWriteStream(WriteStreamName name) { + Storage.GetWriteStreamRequest request = + Storage.GetWriteStreamRequest.newBuilder() + .setName(Objects.isNull(name) ? null : name.toString()) + .build(); return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(name.toString());
-   * }
-   * 
- * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream getWriteStream(String name) { - GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder().setName(name).build(); + public final Stream.WriteStream getWriteStream(String name) { + Storage.GetWriteStreamRequest request = + Storage.GetWriteStreamRequest.newBuilder().setName(name).build(); return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream getWriteStream(GetWriteStreamRequest request) { + public final Stream.WriteStream getWriteStream(Storage.GetWriteStreamRequest request) { return getWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<WriteStream> future = bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   WriteStream response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable getWriteStreamCallable() { + public final UnaryCallable + getWriteStreamCallable() { return stub.getWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
-   * }
-   * 
- * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { - FinalizeWriteStreamRequest request = - FinalizeWriteStreamRequest.newBuilder() - .setName(name == null ? null : name.toString()) + public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { + Storage.FinalizeWriteStreamRequest request = + Storage.FinalizeWriteStreamRequest.newBuilder() + .setName(Objects.isNull(name) ? null : name.toString()) .build(); return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name.toString());
-   * }
-   * 
- * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FinalizeWriteStreamResponse finalizeWriteStream(String name) { - FinalizeWriteStreamRequest request = - FinalizeWriteStreamRequest.newBuilder().setName(name).build(); + public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(String name) { + Storage.FinalizeWriteStreamRequest request = + Storage.FinalizeWriteStreamRequest.newBuilder().setName(name).build(); return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStreamRequest request) { + public final Storage.FinalizeWriteStreamResponse finalizeWriteStream( + Storage.FinalizeWriteStreamRequest request) { return finalizeWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<FinalizeWriteStreamResponse> future = bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   FinalizeWriteStreamResponse response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable + public final UnaryCallable< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamCallable() { return stub.finalizeWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent);
-   * }
-   * 
- * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(TableName parent) { - BatchCommitWriteStreamsRequest request = - BatchCommitWriteStreamsRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) + public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(TableName parent) { + Storage.BatchCommitWriteStreamsRequest request = + Storage.BatchCommitWriteStreamsRequest.newBuilder() + .setParent(Objects.isNull(parent) ? null : parent.toString()) .build(); return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent.toString());
-   * }
-   * 
- * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(String parent) { - BatchCommitWriteStreamsRequest request = - BatchCommitWriteStreamsRequest.newBuilder().setParent(parent).build(); + public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(String parent) { + Storage.BatchCommitWriteStreamsRequest request = + Storage.BatchCommitWriteStreamsRequest.newBuilder().setParent(parent).build(); return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   List<String> writeStreams = new ArrayList<>();
-   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .addAllWriteStreams(writeStreams)
-   *     .build();
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCommitWriteStreamsResponse batchCommitWriteStreams( - BatchCommitWriteStreamsRequest request) { + public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams( + Storage.BatchCommitWriteStreamsRequest request) { return batchCommitWriteStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   List<String> writeStreams = new ArrayList<>();
-   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .addAllWriteStreams(writeStreams)
-   *     .build();
-   *   ApiFuture<BatchCommitWriteStreamsResponse> future = bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
-   *   // Do something
-   *   BatchCommitWriteStreamsResponse response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable + public final UnaryCallable< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsCallable() { return stub.batchCommitWriteStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
-   * }
-   * 
- * - * @param writeStream Required. The stream that is the target of the flush operation. + * @param write_stream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FlushRowsResponse flushRows(WriteStreamName writeStream) { - FlushRowsRequest request = - FlushRowsRequest.newBuilder() - .setWriteStream(writeStream == null ? null : writeStream.toString()) + public final Storage.FlushRowsResponse flushRows(WriteStreamName writeStream) { + Storage.FlushRowsRequest request = + Storage.FlushRowsRequest.newBuilder() + .setWriteStream(Objects.isNull(writeStream) ? null : writeStream.toString()) .build(); return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream.toString());
-   * }
-   * 
- * - * @param writeStream Required. The stream that is the target of the flush operation. + * @param write_stream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FlushRowsResponse flushRows(String writeStream) { - FlushRowsRequest request = FlushRowsRequest.newBuilder().setWriteStream(writeStream).build(); + public final Storage.FlushRowsResponse flushRows(String writeStream) { + Storage.FlushRowsRequest request = + Storage.FlushRowsRequest.newBuilder().setWriteStream(writeStream).build(); return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FlushRowsResponse flushRows(FlushRowsRequest request) { + public final Storage.FlushRowsResponse flushRows(Storage.FlushRowsRequest request) { return flushRowsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation @@ -678,20 +445,9 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) { * the request. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   ApiFuture<FlushRowsResponse> future = bigQueryWriteClient.flushRowsCallable().futureCall(request);
-   *   // Do something
-   *   FlushRowsResponse response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable flushRowsCallable() { + public final UnaryCallable + flushRowsCallable() { return stub.flushRowsCallable(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java index a029c17d0e..2bfe1ee7e0 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.ApiFunction; @@ -26,23 +27,12 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStubSettings; import java.io.IOException; import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryWriteClient}. * @@ -60,52 +50,57 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

- * 
- * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder =
- *     BigQueryWriteSettings.newBuilder();
+ * 
{@code
+ * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder = BigQueryWriteSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
+ *         bigQueryWriteSettingsBuilder
+ *             .createWriteStreamSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BigQueryWriteSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createWriteStream. */ - public UnaryCallSettings createWriteStreamSettings() { + public UnaryCallSettings + createWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).createWriteStreamSettings(); } /** Returns the object with the settings used for calls to appendRows. */ - public StreamingCallSettings appendRowsSettings() { + public StreamingCallSettings + appendRowsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).appendRowsSettings(); } /** Returns the object with the settings used for calls to getWriteStream. */ - public UnaryCallSettings getWriteStreamSettings() { + public UnaryCallSettings + getWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).getWriteStreamSettings(); } /** Returns the object with the settings used for calls to finalizeWriteStream. */ - public UnaryCallSettings + public UnaryCallSettings finalizeWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).finalizeWriteStreamSettings(); } /** Returns the object with the settings used for calls to batchCommitWriteStreams. */ - public UnaryCallSettings + public UnaryCallSettings< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).batchCommitWriteStreamsSettings(); } /** Returns the object with the settings used for calls to flushRows. */ - public UnaryCallSettings flushRowsSettings() { + public UnaryCallSettings + flushRowsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).flushRowsSettings(); } @@ -169,18 +164,15 @@ protected BigQueryWriteSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryWriteSettings. 
*/ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryWriteStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryWriteStubSettings.newBuilder()); - } - protected Builder(BigQueryWriteSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -189,11 +181,15 @@ protected Builder(BigQueryWriteStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryWriteStubSettings.newBuilder()); + } + public BigQueryWriteStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryWriteStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -207,37 +203,40 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createWriteStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createWriteStreamSettings() { return getStubSettingsBuilder().createWriteStreamSettings(); } /** Returns the builder for the settings used for calls to appendRows. */ - public StreamingCallSettings.Builder + public StreamingCallSettings.Builder appendRowsSettings() { return getStubSettingsBuilder().appendRowsSettings(); } /** Returns the builder for the settings used for calls to getWriteStream. */ - public UnaryCallSettings.Builder getWriteStreamSettings() { + public UnaryCallSettings.Builder + getWriteStreamSettings() { return getStubSettingsBuilder().getWriteStreamSettings(); } /** Returns the builder for the settings used for calls to finalizeWriteStream. 
*/ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamSettings() { return getStubSettingsBuilder().finalizeWriteStreamSettings(); } /** Returns the builder for the settings used for calls to batchCommitWriteStreams. */ public UnaryCallSettings.Builder< - BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return getStubSettingsBuilder().batchCommitWriteStreamsSettings(); } /** Returns the builder for the settings used for calls to flushRows. */ - public UnaryCallSettings.Builder flushRowsSettings() { + public UnaryCallSettings.Builder + flushRowsSettings() { return getStubSettingsBuilder().flushRowsSettings(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java index d5a0a66695..561987d3b3 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,17 @@ */ /** - * A client to BigQuery Storage API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

=================== BigQueryWriteClient =================== + *

======================= BigQueryWriteClient ======================= * *

Service Description: BigQuery Write API. * *

The Write API can be used to write data to BigQuery. * *

Sample for BigQueryWriteClient: - * - *

- * 
- * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- *   WriteStream writeStream = WriteStream.newBuilder().build();
- *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
- * }
- * 
- * 
*/ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.bigquery.storage.v1alpha2; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java index c86dcd8a28..27ef0b03d7 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,58 +13,51 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage; +import com.google.cloud.bigquery.storage.v1alpha2.Stream; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. + * Base stub class for the BigQueryWrite service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryWriteStub implements BackgroundResource { - public UnaryCallable createWriteStreamCallable() { + public UnaryCallable + createWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: createWriteStreamCallable()"); } - public BidiStreamingCallable appendRowsCallable() { + public BidiStreamingCallable + appendRowsCallable() { throw new UnsupportedOperationException("Not implemented: appendRowsCallable()"); } - public UnaryCallable getWriteStreamCallable() { + public UnaryCallable getWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: getWriteStreamCallable()"); } - public UnaryCallable + public UnaryCallable finalizeWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: finalizeWriteStreamCallable()"); } - public UnaryCallable + public UnaryCallable< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsCallable() { throw new UnsupportedOperationException("Not implemented: batchCommitWriteStreamsCallable()"); } - public UnaryCallable flushRowsCallable() { + public UnaryCallable flushRowsCallable() { throw new UnsupportedOperationException("Not implemented: flushRowsCallable()"); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java index 09fd472e31..f1d4fc571d 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java +++ 
b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1alpha2.stub; import com.google.api.core.ApiFunction; @@ -31,17 +32,8 @@ import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage; +import com.google.cloud.bigquery.storage.v1alpha2.Stream; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; 
import com.google.common.collect.ImmutableSet; @@ -51,7 +43,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryWriteStub}. * @@ -69,22 +61,23 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

- * 
+ * 
{@code
  * BigQueryWriteStubSettings.Builder bigQueryWriteSettingsBuilder =
  *     BigQueryWriteStubSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
+ *         bigQueryWriteSettingsBuilder
+ *             .createWriteStreamSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteStubSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryWriteStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -94,44 +87,55 @@ public class BigQueryWriteStubSettings extends StubSettings createWriteStreamSettings; - private final StreamingCallSettings appendRowsSettings; - private final UnaryCallSettings getWriteStreamSettings; - private final UnaryCallSettings + private final UnaryCallSettings + createWriteStreamSettings; + private final StreamingCallSettings + appendRowsSettings; + private final UnaryCallSettings + getWriteStreamSettings; + private final UnaryCallSettings< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamSettings; - private final UnaryCallSettings + private final UnaryCallSettings< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings; - private final UnaryCallSettings flushRowsSettings; + private final UnaryCallSettings + flushRowsSettings; /** Returns the object with the settings used for calls to createWriteStream. */ - public UnaryCallSettings createWriteStreamSettings() { + public UnaryCallSettings + createWriteStreamSettings() { return createWriteStreamSettings; } /** Returns the object with the settings used for calls to appendRows. */ - public StreamingCallSettings appendRowsSettings() { + public StreamingCallSettings + appendRowsSettings() { return appendRowsSettings; } /** Returns the object with the settings used for calls to getWriteStream. */ - public UnaryCallSettings getWriteStreamSettings() { + public UnaryCallSettings + getWriteStreamSettings() { return getWriteStreamSettings; } /** Returns the object with the settings used for calls to finalizeWriteStream. 
*/ - public UnaryCallSettings + public UnaryCallSettings finalizeWriteStreamSettings() { return finalizeWriteStreamSettings; } /** Returns the object with the settings used for calls to batchCommitWriteStreams. */ - public UnaryCallSettings + public UnaryCallSettings< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return batchCommitWriteStreamsSettings; } /** Returns the object with the settings used for calls to flushRows. */ - public UnaryCallSettings flushRowsSettings() { + public UnaryCallSettings + flushRowsSettings() { return flushRowsSettings; } @@ -141,10 +145,10 @@ public BigQueryWriteStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryWriteStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -215,20 +219,21 @@ protected BigQueryWriteStubSettings(Builder settingsBuilder) throws IOException /** Builder for BigQueryWriteStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder createWriteStreamSettings; - private final StreamingCallSettings.Builder + private final StreamingCallSettings.Builder< + Storage.AppendRowsRequest, Storage.AppendRowsResponse> appendRowsSettings; - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder getWriteStreamSettings; - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamSettings; private final UnaryCallSettings.Builder< - BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings; - private final UnaryCallSettings.Builder flushRowsSettings; - + private final UnaryCallSettings.Builder + flushRowsSettings; private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -236,23 +241,23 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", + "retry_policy_0_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_3_codes", + "retry_policy_1_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); definitions.put( "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + 
definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -271,7 +276,7 @@ public static class Builder extends StubSettings.Builder>of( + createWriteStreamSettings, + getWriteStreamSettings, + finalizeWriteStreamSettings, + batchCommitWriteStreamsSettings, + flushRowsSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .getWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder .finalizeWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder .batchCommitWriteStreamsSettings() - 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder .flushRowsSettings() @@ -368,26 +387,7 @@ private static Builder initDefaults(Builder builder) { return builder; } - protected Builder(BigQueryWriteStubSettings settings) { - super(settings); - - createWriteStreamSettings = settings.createWriteStreamSettings.toBuilder(); - appendRowsSettings = settings.appendRowsSettings.toBuilder(); - getWriteStreamSettings = settings.getWriteStreamSettings.toBuilder(); - finalizeWriteStreamSettings = settings.finalizeWriteStreamSettings.toBuilder(); - batchCommitWriteStreamsSettings = settings.batchCommitWriteStreamsSettings.toBuilder(); - flushRowsSettings = settings.flushRowsSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createWriteStreamSettings, - getWriteStreamSettings, - finalizeWriteStreamSettings, - batchCommitWriteStreamsSettings, - flushRowsSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -404,37 +404,40 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createWriteStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createWriteStreamSettings() { return createWriteStreamSettings; } /** Returns the builder for the settings used for calls to appendRows. */ - public StreamingCallSettings.Builder + public StreamingCallSettings.Builder appendRowsSettings() { return appendRowsSettings; } /** Returns the builder for the settings used for calls to getWriteStream. 
*/ - public UnaryCallSettings.Builder getWriteStreamSettings() { + public UnaryCallSettings.Builder + getWriteStreamSettings() { return getWriteStreamSettings; } /** Returns the builder for the settings used for calls to finalizeWriteStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamSettings() { return finalizeWriteStreamSettings; } /** Returns the builder for the settings used for calls to batchCommitWriteStreams. */ public UnaryCallSettings.Builder< - BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return batchCommitWriteStreamsSettings; } /** Returns the builder for the settings used for calls to flushRows. */ - public UnaryCallSettings.Builder flushRowsSettings() { + public UnaryCallSettings.Builder + flushRowsSettings() { return flushRowsSettings; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java index e1e5621cdf..f80038c9ae 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1alpha2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryWrite service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryWriteCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java index 7729ba6f9b..ccb0b89388 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -24,18 +24,10 @@ import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage; +import com.google.cloud.bigquery.storage.v1alpha2.Stream; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -43,88 +35,106 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryWrite service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryWriteStub extends BigQueryWriteStub { - - private static final MethodDescriptor + private static final MethodDescriptor createWriteStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/CreateWriteStream") .setRequestMarshaller( - ProtoUtils.marshaller(CreateWriteStreamRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) + ProtoUtils.marshaller(Storage.CreateWriteStreamRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Stream.WriteStream.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor appendRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.BIDI_STREAMING) .setFullMethodName("google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/AppendRows") - .setRequestMarshaller(ProtoUtils.marshaller(AppendRowsRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(AppendRowsResponse.getDefaultInstance())) + .setRequestMarshaller( + ProtoUtils.marshaller(Storage.AppendRowsRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(Storage.AppendRowsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor getWriteStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/GetWriteStream") 
.setRequestMarshaller( - ProtoUtils.marshaller(GetWriteStreamRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) + ProtoUtils.marshaller(Storage.GetWriteStreamRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Stream.WriteStream.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor + .newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/FinalizeWriteStream") .setRequestMarshaller( - ProtoUtils.marshaller(FinalizeWriteStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(Storage.FinalizeWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(FinalizeWriteStreamResponse.getDefaultInstance())) + ProtoUtils.marshaller(Storage.FinalizeWriteStreamResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor< - BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsMethodDescriptor = MethodDescriptor - .newBuilder() + . 
+ newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/BatchCommitWriteStreams") .setRequestMarshaller( - ProtoUtils.marshaller(BatchCommitWriteStreamsRequest.getDefaultInstance())) + ProtoUtils.marshaller( + Storage.BatchCommitWriteStreamsRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(BatchCommitWriteStreamsResponse.getDefaultInstance())) + ProtoUtils.marshaller( + Storage.BatchCommitWriteStreamsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor flushRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/FlushRows") - .setRequestMarshaller(ProtoUtils.marshaller(FlushRowsRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(FlushRowsResponse.getDefaultInstance())) + .setRequestMarshaller( + ProtoUtils.marshaller(Storage.FlushRowsRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(Storage.FlushRowsResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - - private final UnaryCallable createWriteStreamCallable; - private final BidiStreamingCallable appendRowsCallable; - private final UnaryCallable getWriteStreamCallable; - private final UnaryCallable + private final UnaryCallable + createWriteStreamCallable; + private final BidiStreamingCallable + appendRowsCallable; + private final UnaryCallable + getWriteStreamCallable; + private final UnaryCallable< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamCallable; - private final UnaryCallable + private final UnaryCallable< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsCallable; - private final 
UnaryCallable flushRowsCallable; + private final UnaryCallable + flushRowsCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryWriteStub create(BigQueryWriteStubSettings settings) @@ -142,100 +152,110 @@ public static final GrpcBigQueryWriteStub create( BigQueryWriteStubSettings.newBuilder().build(), clientContext, callableFactory); } - /** - * Constructs an instance of GrpcBigQueryWriteStub, using the given settings. This is protected so - * that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. - */ protected GrpcBigQueryWriteStub(BigQueryWriteStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryWriteCallableFactory()); } - /** - * Constructs an instance of GrpcBigQueryWriteStub, using the given settings. This is protected so - * that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. 
- */ protected GrpcBigQueryWriteStub( BigQueryWriteStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); - GrpcCallSettings createWriteStreamTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(createWriteStreamMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(CreateWriteStreamRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("parent", String.valueOf(request.getParent())); - return params.build(); - } - }) - .build(); - GrpcCallSettings appendRowsTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(appendRowsMethodDescriptor) - .build(); - GrpcCallSettings getWriteStreamTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(getWriteStreamMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(GetWriteStreamRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("name", String.valueOf(request.getName())); - return params.build(); - } - }) - .build(); - GrpcCallSettings + GrpcCallSettings + createWriteStreamTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createWriteStreamMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.CreateWriteStreamRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return params.build(); + } + }) + .build(); + GrpcCallSettings + appendRowsTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(appendRowsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.AppendRowsRequest 
request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("write_stream", String.valueOf(request.getWriteStream())); + return params.build(); + } + }) + .build(); + GrpcCallSettings + getWriteStreamTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getWriteStreamMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.GetWriteStreamRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + } + }) + .build(); + GrpcCallSettings finalizeWriteStreamTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings + . + newBuilder() .setMethodDescriptor(finalizeWriteStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(FinalizeWriteStreamRequest request) { + public Map extract( + Storage.FinalizeWriteStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("name", String.valueOf(request.getName())); return params.build(); } }) .build(); - GrpcCallSettings + GrpcCallSettings< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsTransportSettings = GrpcCallSettings - .newBuilder() + . 
+ newBuilder() .setMethodDescriptor(batchCommitWriteStreamsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(BatchCommitWriteStreamsRequest request) { + public Map extract( + Storage.BatchCommitWriteStreamsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("parent", String.valueOf(request.getParent())); return params.build(); } }) .build(); - GrpcCallSettings flushRowsTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(flushRowsMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(FlushRowsRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("write_stream", String.valueOf(request.getWriteStream())); - return params.build(); - } - }) - .build(); + GrpcCallSettings + flushRowsTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(flushRowsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.FlushRowsRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("write_stream", String.valueOf(request.getWriteStream())); + return params.build(); + } + }) + .build(); this.createWriteStreamCallable = callableFactory.createUnaryCallable( @@ -262,32 +282,40 @@ public Map extract(FlushRowsRequest request) { callableFactory.createUnaryCallable( flushRowsTransportSettings, settings.flushRowsSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } - public UnaryCallable createWriteStreamCallable() { + public UnaryCallable + createWriteStreamCallable() { return createWriteStreamCallable; } - 
public BidiStreamingCallable appendRowsCallable() { + public BidiStreamingCallable + appendRowsCallable() { return appendRowsCallable; } - public UnaryCallable getWriteStreamCallable() { + public UnaryCallable getWriteStreamCallable() { return getWriteStreamCallable; } - public UnaryCallable + public UnaryCallable finalizeWriteStreamCallable() { return finalizeWriteStreamCallable; } - public UnaryCallable + public UnaryCallable< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsCallable() { return batchCommitWriteStreamsCallable; } - public UnaryCallable flushRowsCallable() { + public UnaryCallable flushRowsCallable() { return flushRowsCallable; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java index 8f337335b5..5528718f0d 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,31 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.Stream; -import com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStub; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStubSettings; import com.google.protobuf.Empty; import java.io.IOException; +import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery storage API. * @@ -46,18 +37,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
- *   TableReference tableReference = TableReference.newBuilder().build();
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   int requestedStreams = 0;
- *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the baseBigQueryStorageClient object to clean up resources + *

Note: close() needs to be called on the BaseBigQueryStorageClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -86,30 +66,28 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BaseBigQueryStorageSettings baseBigQueryStorageSettings =
  *     BaseBigQueryStorageSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryStorageClient baseBigQueryStorageClient =
  *     BaseBigQueryStorageClient.create(baseBigQueryStorageSettings);
- * 
- * 
+ * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BaseBigQueryStorageSettings baseBigQueryStorageSettings =
  *     BaseBigQueryStorageSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryStorageClient baseBigQueryStorageClient =
  *     BaseBigQueryStorageClient.create(baseBigQueryStorageSettings);
- * 
- * 
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BaseBigQueryStorageClient implements BackgroundResource { private final BaseBigQueryStorageSettings settings; private final BigQueryStorageStub stub; @@ -130,7 +108,7 @@ public static final BaseBigQueryStorageClient create(BaseBigQueryStorageSettings /** * Constructs an instance of BaseBigQueryStorageClient, using the given stub for making calls. - * This is for advanced usage - prefer to use BaseBigQueryStorageSettings}. + * This is for advanced usage - prefer using create(BaseBigQueryStorageSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryStorageClient create(BigQueryStorageStub stub) { @@ -162,7 +140,7 @@ public BigQueryStorageStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -175,39 +153,28 @@ public BigQueryStorageStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   TableReference tableReference = TableReference.newBuilder().build();
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   int requestedStreams = 0;
-   *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
-   * }
-   * 
- * - * @param tableReference Required. Reference to the table to read. + * @param table_reference Required. Reference to the table to read. * @param parent Required. String of the form `projects/{project_id}` indicating the project this * ReadSession is associated with. This is the project that will be billed for usage. - * @param requestedStreams Initial number of streams. If unset or 0, we will provide a value of + * @param requested_streams Initial number of streams. If unset or 0, we will provide a value of * streams so as to produce reasonable throughput. Must be non-negative. The number of streams * may be lower than the requested number, depending on the amount parallelism that is * reasonable for the table and the maximum amount of parallelism allowed by the system. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ReadSession createReadSession( - TableReference tableReference, ProjectName parent, int requestedStreams) { - CreateReadSessionRequest request = - CreateReadSessionRequest.newBuilder() + public final Storage.ReadSession createReadSession( + TableReferenceProto.TableReference tableReference, ProjectName parent, int requestedStreams) { + Storage.CreateReadSessionRequest request = + Storage.CreateReadSessionRequest.newBuilder() .setTableReference(tableReference) - .setParent(parent == null ? null : parent.toString()) + .setParent(Objects.isNull(parent) ? null : parent.toString()) .setRequestedStreams(requestedStreams) .build(); return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -220,31 +187,20 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   TableReference tableReference = TableReference.newBuilder().build();
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   int requestedStreams = 0;
-   *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent.toString(), requestedStreams);
-   * }
-   * 
- * - * @param tableReference Required. Reference to the table to read. + * @param table_reference Required. Reference to the table to read. * @param parent Required. String of the form `projects/{project_id}` indicating the project this * ReadSession is associated with. This is the project that will be billed for usage. - * @param requestedStreams Initial number of streams. If unset or 0, we will provide a value of + * @param requested_streams Initial number of streams. If unset or 0, we will provide a value of * streams so as to produce reasonable throughput. Must be non-negative. The number of streams * may be lower than the requested number, depending on the amount parallelism that is * reasonable for the table and the maximum amount of parallelism allowed by the system. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ReadSession createReadSession( - TableReference tableReference, String parent, int requestedStreams) { - CreateReadSessionRequest request = - CreateReadSessionRequest.newBuilder() + public final Storage.ReadSession createReadSession( + TableReferenceProto.TableReference tableReference, String parent, int requestedStreams) { + Storage.CreateReadSessionRequest request = + Storage.CreateReadSessionRequest.newBuilder() .setTableReference(tableReference) .setParent(parent) .setRequestedStreams(requestedStreams) @@ -252,7 +208,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -265,28 +221,14 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   TableReference tableReference = TableReference.newBuilder().build();
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setTableReference(tableReference)
-   *     .setParent(parent.toString())
-   *     .build();
-   *   ReadSession response = baseBigQueryStorageClient.createReadSession(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ReadSession createReadSession(CreateReadSessionRequest request) { + public final Storage.ReadSession createReadSession(Storage.CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -300,26 +242,13 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   TableReference tableReference = TableReference.newBuilder().build();
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setTableReference(tableReference)
-   *     .setParent(parent.toString())
-   *     .build();
-   *   ApiFuture<ReadSession> future = baseBigQueryStorageClient.createReadSessionCallable().futureCall(request);
-   *   // Do something
-   *   ReadSession response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable createReadSessionCallable() { + public final UnaryCallable + createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Reads rows from the table in the format prescribed by the read session. Each response contains * one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to @@ -331,111 +260,61 @@ public final UnaryCallable createReadSess * data. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   StreamPosition readPosition = StreamPosition.newBuilder().build();
-   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
-   *     .setReadPosition(readPosition)
-   *     .build();
-   *
-   *   ServerStream<ReadRowsResponse> stream = baseBigQueryStorageClient.readRowsCallable().call(request);
-   *   for (ReadRowsResponse response : stream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ - public final ServerStreamingCallable readRowsCallable() { + public final ServerStreamingCallable + readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   ReadSession session = ReadSession.newBuilder().build();
-   *   int requestedStreams = 0;
-   *   BatchCreateReadSessionStreamsResponse response = baseBigQueryStorageClient.batchCreateReadSessionStreams(session, requestedStreams);
-   * }
-   * 
- * - * @param session Required. Must be a non-expired session obtained from a call to * CreateReadSession. Only the name field needs to be set. - * @param requestedStreams Required. Number of new streams requested. Must be positive. Number of + * @param requestedStreams Required. Number of new streams requested. Must be positive. Number of * added streams may be less than this, see CreateReadSessionRequest for more information. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( - ReadSession session, int requestedStreams) { - BatchCreateReadSessionStreamsRequest request = - BatchCreateReadSessionStreamsRequest.newBuilder() + public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( + Storage.ReadSession session, int requestedStreams) { + Storage.BatchCreateReadSessionStreamsRequest request = + Storage.BatchCreateReadSessionStreamsRequest.newBuilder() .setSession(session) .setRequestedStreams(requestedStreams) .build(); return batchCreateReadSessionStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   ReadSession session = ReadSession.newBuilder().build();
-   *   int requestedStreams = 0;
-   *   BatchCreateReadSessionStreamsRequest request = BatchCreateReadSessionStreamsRequest.newBuilder()
-   *     .setSession(session)
-   *     .setRequestedStreams(requestedStreams)
-   *     .build();
-   *   BatchCreateReadSessionStreamsResponse response = baseBigQueryStorageClient.batchCreateReadSessionStreams(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( - BatchCreateReadSessionStreamsRequest request) { + public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( + Storage.BatchCreateReadSessionStreamsRequest request) { return batchCreateReadSessionStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   ReadSession session = ReadSession.newBuilder().build();
-   *   int requestedStreams = 0;
-   *   BatchCreateReadSessionStreamsRequest request = BatchCreateReadSessionStreamsRequest.newBuilder()
-   *     .setSession(session)
-   *     .setRequestedStreams(requestedStreams)
-   *     .build();
-   *   ApiFuture<BatchCreateReadSessionStreamsResponse> future = baseBigQueryStorageClient.batchCreateReadSessionStreamsCallable().futureCall(request);
-   *   // Do something
-   *   BatchCreateReadSessionStreamsResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable() { return stub.batchCreateReadSessionStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -449,24 +328,16 @@ public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams *

This method will return an error if there are no other live streams in the Session, or if * SplitReadStream() has been called on the given Stream. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream stream = Stream.newBuilder().build();
-   *   baseBigQueryStorageClient.finalizeStream(stream);
-   * }
-   * 
- * * @param stream Required. Stream to finalize. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final void finalizeStream(Stream stream) { - FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder().setStream(stream).build(); + public final void finalizeStream(Storage.Stream stream) { + Storage.FinalizeStreamRequest request = + Storage.FinalizeStreamRequest.newBuilder().setStream(stream).build(); finalizeStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -480,26 +351,14 @@ public final void finalizeStream(Stream stream) { *

This method will return an error if there are no other live streams in the Session, or if * SplitReadStream() has been called on the given Stream. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream stream = Stream.newBuilder().build();
-   *   FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder()
-   *     .setStream(stream)
-   *     .build();
-   *   baseBigQueryStorageClient.finalizeStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final void finalizeStream(FinalizeStreamRequest request) { + public final void finalizeStream(Storage.FinalizeStreamRequest request) { finalizeStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -514,24 +373,12 @@ public final void finalizeStream(FinalizeStreamRequest request) { * SplitReadStream() has been called on the given Stream. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream stream = Stream.newBuilder().build();
-   *   FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder()
-   *     .setStream(stream)
-   *     .build();
-   *   ApiFuture<Void> future = baseBigQueryStorageClient.finalizeStreamCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
*/ - public final UnaryCallable finalizeStreamCallable() { + public final UnaryCallable finalizeStreamCallable() { return stub.finalizeStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as @@ -545,25 +392,16 @@ public final UnaryCallable finalizeStreamCallable( * *

This method is guaranteed to be idempotent. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream originalStream = Stream.newBuilder().build();
-   *   SplitReadStreamResponse response = baseBigQueryStorageClient.splitReadStream(originalStream);
-   * }
-   * 
- * - * @param originalStream Required. Stream to split. + * @param originalStream Required. Stream to split. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final SplitReadStreamResponse splitReadStream(Stream originalStream) { - SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setOriginalStream(originalStream).build(); + public final Storage.SplitReadStreamResponse splitReadStream(Storage.Stream originalStream) { + Storage.SplitReadStreamRequest request = + Storage.SplitReadStreamRequest.newBuilder().setOriginalStream(originalStream).build(); return splitReadStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as * before. Both of the returned streams can also be read from, and the rows returned by both * child streams will be the same as the rows read from the original stream. * *

This method is guaranteed to be idempotent. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream originalStream = Stream.newBuilder().build();
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setOriginalStream(originalStream)
-   *     .build();
-   *   SplitReadStreamResponse response = baseBigQueryStorageClient.splitReadStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest request) { + public final Storage.SplitReadStreamResponse splitReadStream( + Storage.SplitReadStreamRequest request) { return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as @@ -611,20 +438,8 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ *

This method is guaranteed to be idempotent. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream originalStream = Stream.newBuilder().build();
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setOriginalStream(originalStream)
-   *     .build();
-   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryStorageClient.splitReadStreamCallable().futureCall(request);
-   *   // Do something
-   *   SplitReadStreamResponse response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable + public final UnaryCallable splitReadStreamCallable() { return stub.splitReadStreamCallable(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java index a1bb6b456e..73b6bf5729 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.ApiFunction; @@ -26,22 +27,13 @@ import com.google.api.gax.rpc.ServerStreamingCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStubSettings; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BaseBigQueryStorageClient}. * @@ -59,48 +51,53 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
+ * 
{@code
  * BaseBigQueryStorageSettings.Builder baseBigQueryStorageSettingsBuilder =
  *     BaseBigQueryStorageSettings.newBuilder();
  * baseBigQueryStorageSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryStorageSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         baseBigQueryStorageSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BaseBigQueryStorageSettings baseBigQueryStorageSettings = baseBigQueryStorageSettingsBuilder.build();
- * 
- * 
+ * BaseBigQueryStorageSettings baseBigQueryStorageSettings = + * baseBigQueryStorageSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BaseBigQueryStorageSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createReadSession. */ - public UnaryCallSettings createReadSessionSettings() { + public UnaryCallSettings + createReadSessionSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).createReadSessionSettings(); } /** Returns the object with the settings used for calls to readRows. */ - public ServerStreamingCallSettings readRowsSettings() { + public ServerStreamingCallSettings + readRowsSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).readRowsSettings(); } /** Returns the object with the settings used for calls to batchCreateReadSessionStreams. */ public UnaryCallSettings< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return ((BigQueryStorageStubSettings) getStubSettings()) .batchCreateReadSessionStreamsSettings(); } /** Returns the object with the settings used for calls to finalizeStream. */ - public UnaryCallSettings finalizeStreamSettings() { + public UnaryCallSettings finalizeStreamSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).finalizeStreamSettings(); } /** Returns the object with the settings used for calls to splitReadStream. */ - public UnaryCallSettings + public UnaryCallSettings splitReadStreamSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).splitReadStreamSettings(); } @@ -165,18 +162,15 @@ protected BaseBigQueryStorageSettings(Builder settingsBuilder) throws IOExceptio /** Builder for BaseBigQueryStorageSettings. 
*/ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryStorageStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryStorageStubSettings.newBuilder()); - } - protected Builder(BaseBigQueryStorageSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -185,11 +179,15 @@ protected Builder(BigQueryStorageStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryStorageStubSettings.newBuilder()); + } + public BigQueryStorageStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryStorageStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -203,31 +201,34 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createReadSession. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createReadSessionSettings() { return getStubSettingsBuilder().createReadSessionSettings(); } /** Returns the builder for the settings used for calls to readRows. */ - public ServerStreamingCallSettings.Builder + public ServerStreamingCallSettings.Builder readRowsSettings() { return getStubSettingsBuilder().readRowsSettings(); } /** Returns the builder for the settings used for calls to batchCreateReadSessionStreams. 
*/ public UnaryCallSettings.Builder< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return getStubSettingsBuilder().batchCreateReadSessionStreamsSettings(); } /** Returns the builder for the settings used for calls to finalizeStream. */ - public UnaryCallSettings.Builder finalizeStreamSettings() { + public UnaryCallSettings.Builder + finalizeStreamSettings() { return getStubSettingsBuilder().finalizeStreamSettings(); } /** Returns the builder for the settings used for calls to splitReadStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder< + Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> splitReadStreamSettings() { return getStubSettingsBuilder().splitReadStreamSettings(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java index 5c0d3b601e..eff4878eee 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,17 @@ */ /** - * A client to BigQuery Storage API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

========================= BaseBigQueryStorageClient ========================= + *

======================= BaseBigQueryStorageClient ======================= * *

Service Description: BigQuery storage API. * *

The BigQuery storage API can be used to read data stored in BigQuery. * - *

Sample for BaseBigQueryStorageClient: - * - *

- * 
- * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
- *   TableReference tableReference = TableReference.newBuilder().build();
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   int requestedStreams = 0;
- *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
- * }
- * 
- * 
+ *

Sample for BigQueryStorageClient: */ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.bigquery.storage.v1beta1; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java index 2d806771cb..d7f64bde10 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,53 +13,49 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage; import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. + * Base stub class for the BigQueryStorage service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryStorageStub implements BackgroundResource { - public UnaryCallable createReadSessionCallable() { + public UnaryCallable + createReadSessionCallable() { throw new UnsupportedOperationException("Not implemented: createReadSessionCallable()"); } - public ServerStreamingCallable readRowsCallable() { + public ServerStreamingCallable + readRowsCallable() { throw new UnsupportedOperationException("Not implemented: readRowsCallable()"); } - public UnaryCallable + public UnaryCallable< + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable() { throw new UnsupportedOperationException( "Not implemented: batchCreateReadSessionStreamsCallable()"); } - public UnaryCallable finalizeStreamCallable() { + public UnaryCallable finalizeStreamCallable() { throw new UnsupportedOperationException("Not implemented: finalizeStreamCallable()"); } - public UnaryCallable splitReadStreamCallable() { + public UnaryCallable + splitReadStreamCallable() { throw new UnsupportedOperationException("Not implemented: splitReadStreamCallable()"); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java index 1cf3ac0ccc..5225c25b98 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta1.stub; import com.google.api.core.ApiFunction; @@ -31,15 +32,7 @@ import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -50,7 +43,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryStorageStub}. * @@ -68,22 +61,23 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
- * BigQueryStorageStubSettings.Builder baseBigQueryStorageSettingsBuilder =
+ * 
{@code
+ * BigQueryStorageStubSettings.Builder bigQueryStorageSettingsBuilder =
  *     BigQueryStorageStubSettings.newBuilder();
- * baseBigQueryStorageSettingsBuilder
+ * bigQueryStorageSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryStorageSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         bigQueryStorageSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryStorageStubSettings baseBigQueryStorageSettings = baseBigQueryStorageSettingsBuilder.build();
- * 
- * 
+ * BigQueryStorageStubSettings bigQueryStorageSettings = bigQueryStorageSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryStorageStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -93,39 +87,45 @@ public class BigQueryStorageStubSettings extends StubSettings createReadSessionSettings; - private final ServerStreamingCallSettings readRowsSettings; + private final UnaryCallSettings + createReadSessionSettings; + private final ServerStreamingCallSettings + readRowsSettings; private final UnaryCallSettings< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings; - private final UnaryCallSettings finalizeStreamSettings; - private final UnaryCallSettings + private final UnaryCallSettings finalizeStreamSettings; + private final UnaryCallSettings splitReadStreamSettings; /** Returns the object with the settings used for calls to createReadSession. */ - public UnaryCallSettings createReadSessionSettings() { + public UnaryCallSettings + createReadSessionSettings() { return createReadSessionSettings; } /** Returns the object with the settings used for calls to readRows. */ - public ServerStreamingCallSettings readRowsSettings() { + public ServerStreamingCallSettings + readRowsSettings() { return readRowsSettings; } /** Returns the object with the settings used for calls to batchCreateReadSessionStreams. */ public UnaryCallSettings< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return batchCreateReadSessionStreamsSettings; } /** Returns the object with the settings used for calls to finalizeStream. 
*/ - public UnaryCallSettings finalizeStreamSettings() { + public UnaryCallSettings finalizeStreamSettings() { return finalizeStreamSettings; } /** Returns the object with the settings used for calls to splitReadStream. */ - public UnaryCallSettings + public UnaryCallSettings splitReadStreamSettings() { return splitReadStreamSettings; } @@ -136,10 +136,10 @@ public BigQueryStorageStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryStorageStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -210,18 +210,20 @@ protected BigQueryStorageStubSettings(Builder settingsBuilder) throws IOExceptio /** Builder for BigQueryStorageStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder createReadSessionSettings; - private final ServerStreamingCallSettings.Builder + private final ServerStreamingCallSettings.Builder< + Storage.ReadRowsRequest, Storage.ReadRowsResponse> readRowsSettings; private final UnaryCallSettings.Builder< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings; - private final UnaryCallSettings.Builder finalizeStreamSettings; - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder + finalizeStreamSettings; + private final UnaryCallSettings.Builder< + Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> splitReadStreamSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -229,19 +231,18 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", + "retry_policy_0_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_3_codes", + "retry_policy_1_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -260,7 +261,7 @@ public static class Builder extends StubSettings.Builder>of( + createReadSessionSettings, + 
batchCreateReadSessionStreamsSettings, + finalizeStreamSettings, + splitReadStreamSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .readRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .batchCreateReadSessionStreamsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder .finalizeStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder 
.splitReadStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); return builder; } - protected Builder(BigQueryStorageStubSettings settings) { - super(settings); - - createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); - readRowsSettings = settings.readRowsSettings.toBuilder(); - batchCreateReadSessionStreamsSettings = - settings.batchCreateReadSessionStreamsSettings.toBuilder(); - finalizeStreamSettings = settings.finalizeStreamSettings.toBuilder(); - splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createReadSessionSettings, - batchCreateReadSessionStreamsSettings, - finalizeStreamSettings, - splitReadStreamSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -389,31 +384,34 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createReadSession. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createReadSessionSettings() { return createReadSessionSettings; } /** Returns the builder for the settings used for calls to readRows. */ - public ServerStreamingCallSettings.Builder + public ServerStreamingCallSettings.Builder readRowsSettings() { return readRowsSettings; } /** Returns the builder for the settings used for calls to batchCreateReadSessionStreams. 
*/ public UnaryCallSettings.Builder< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return batchCreateReadSessionStreamsSettings; } /** Returns the builder for the settings used for calls to finalizeStream. */ - public UnaryCallSettings.Builder finalizeStreamSettings() { + public UnaryCallSettings.Builder + finalizeStreamSettings() { return finalizeStreamSettings; } /** Returns the builder for the settings used for calls to splitReadStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder< + Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> splitReadStreamSettings() { return splitReadStreamSettings; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java index 16a6b42c5f..4cf9880b97 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryStorage service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryStorageCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java index 4ffa5f6309..7bd61a7407 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -24,16 +24,9 @@ import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; @@ -42,81 +35,95 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryStorage service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryStorageStub extends BigQueryStorageStub { - - private static final MethodDescriptor + private static final MethodDescriptor createReadSessionMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/CreateReadSession") .setRequestMarshaller( - ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) + ProtoUtils.marshaller(Storage.CreateReadSessionRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(Storage.ReadSession.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor readRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.SERVER_STREAMING) .setFullMethodName("google.cloud.bigquery.storage.v1beta1.BigQueryStorage/ReadRows") - .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) + .setRequestMarshaller( + ProtoUtils.marshaller(Storage.ReadRowsRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(Storage.ReadRowsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsMethodDescriptor = MethodDescriptor - . + . 
newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/BatchCreateReadSessionStreams") .setRequestMarshaller( - ProtoUtils.marshaller(BatchCreateReadSessionStreamsRequest.getDefaultInstance())) + ProtoUtils.marshaller( + Storage.BatchCreateReadSessionStreamsRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(BatchCreateReadSessionStreamsResponse.getDefaultInstance())) + ProtoUtils.marshaller( + Storage.BatchCreateReadSessionStreamsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor finalizeStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/FinalizeStream") .setRequestMarshaller( - ProtoUtils.marshaller(FinalizeStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(Storage.FinalizeStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor< + Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> splitReadStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor + .newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/SplitReadStream") .setRequestMarshaller( - ProtoUtils.marshaller(SplitReadStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(Storage.SplitReadStreamRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) + ProtoUtils.marshaller(Storage.SplitReadStreamResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - - private final UnaryCallable 
createReadSessionCallable; - private final ServerStreamingCallable readRowsCallable; + private final UnaryCallable + createReadSessionCallable; + private final ServerStreamingCallable + readRowsCallable; private final UnaryCallable< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable; - private final UnaryCallable finalizeStreamCallable; - private final UnaryCallable + private final UnaryCallable finalizeStreamCallable; + private final UnaryCallable splitReadStreamCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryStorageStub create(BigQueryStorageStubSettings settings) @@ -136,53 +143,42 @@ public static final GrpcBigQueryStorageStub create( BigQueryStorageStubSettings.newBuilder().build(), clientContext, callableFactory); } - /** - * Constructs an instance of GrpcBigQueryStorageStub, using the given settings. This is protected - * so that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. - */ protected GrpcBigQueryStorageStub( BigQueryStorageStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryStorageCallableFactory()); } - /** - * Constructs an instance of GrpcBigQueryStorageStub, using the given settings. This is protected - * so that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. 
- */ protected GrpcBigQueryStorageStub( BigQueryStorageStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); - GrpcCallSettings createReadSessionTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(createReadSessionMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(CreateReadSessionRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put( - "table_reference.project_id", - String.valueOf(request.getTableReference().getProjectId())); - params.put( - "table_reference.dataset_id", - String.valueOf(request.getTableReference().getDatasetId())); - return params.build(); - } - }) - .build(); - GrpcCallSettings readRowsTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings + createReadSessionTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createReadSessionMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.CreateReadSessionRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put( + "table_reference.project_id", + String.valueOf(request.getTableReference().getProjectId())); + return params.build(); + } + }) + .build(); + GrpcCallSettings readRowsTransportSettings = + GrpcCallSettings.newBuilder() .setMethodDescriptor(readRowsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(ReadRowsRequest request) { + public Map extract(Storage.ReadRowsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put( "read_position.stream.name", @@ -191,44 +187,48 @@ public Map extract(ReadRowsRequest request) { } }) .build(); - GrpcCallSettings + GrpcCallSettings< + 
Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsTransportSettings = GrpcCallSettings - . + . newBuilder() .setMethodDescriptor(batchCreateReadSessionStreamsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override public Map extract( - BatchCreateReadSessionStreamsRequest request) { + Storage.BatchCreateReadSessionStreamsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("session.name", String.valueOf(request.getSession().getName())); return params.build(); } }) .build(); - GrpcCallSettings finalizeStreamTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings finalizeStreamTransportSettings = + GrpcCallSettings.newBuilder() .setMethodDescriptor(finalizeStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(FinalizeStreamRequest request) { + public Map extract(Storage.FinalizeStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("stream.name", String.valueOf(request.getStream().getName())); return params.build(); } }) .build(); - GrpcCallSettings + GrpcCallSettings splitReadStreamTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings + .newBuilder() .setMethodDescriptor(splitReadStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(SplitReadStreamRequest request) { + public Map extract(Storage.SplitReadStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put( "original_stream.name", @@ -258,27 +258,37 @@ public Map extract(SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - backgroundResources = new 
BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } - public UnaryCallable createReadSessionCallable() { + public UnaryCallable + createReadSessionCallable() { return createReadSessionCallable; } - public ServerStreamingCallable readRowsCallable() { + public ServerStreamingCallable + readRowsCallable() { return readRowsCallable; } - public UnaryCallable + public UnaryCallable< + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable() { return batchCreateReadSessionStreamsCallable; } - public UnaryCallable finalizeStreamCallable() { + public UnaryCallable finalizeStreamCallable() { return finalizeStreamCallable; } - public UnaryCallable splitReadStreamCallable() { + public UnaryCallable + splitReadStreamCallable() { return splitReadStreamCallable; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java index 12ac3ce6ca..73f4c14477 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -22,10 +23,11 @@ import com.google.cloud.bigquery.storage.v1beta2.stub.BigQueryReadStub; import com.google.cloud.bigquery.storage.v1beta2.stub.BigQueryReadStubSettings; import java.io.IOException; +import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery Read API. * @@ -37,18 +39,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   ReadSession readSession = ReadSession.newBuilder().build();
- *   int maxStreamCount = 0;
- *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the baseBigQueryReadClient object to clean up resources + *

Note: close() needs to be called on the BaseBigQueryReadClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -77,30 +68,28 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * 
- * 
+ * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * 
- * 
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BaseBigQueryReadClient implements BackgroundResource { private final BaseBigQueryReadSettings settings; private final BigQueryReadStub stub; @@ -121,7 +110,7 @@ public static final BaseBigQueryReadClient create(BaseBigQueryReadSettings setti /** * Constructs an instance of BaseBigQueryReadClient, using the given stub for making calls. This - * is for advanced usage - prefer to use BaseBigQueryReadSettings}. + * is for advanced usage - prefer using create(BaseBigQueryReadSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryReadClient create(BigQueryReadStub stub) { @@ -153,7 +142,7 @@ public BigQueryReadStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -172,25 +161,14 @@ public BigQueryReadStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   int maxStreamCount = 0;
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
-   * }
-   * 
- * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. - * @param readSession Required. Session to be created. - * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide - * a value of streams so as to produce reasonable throughput. Must be non-negative. The number - * of streams may be lower than the requested number, depending on the amount parallelism that - * is reasonable for the table. Error will be returned if the max count is greater than the - * current system max limit of 1,000. + * @param read_session Required. Session to be created. + * @param max_stream_count Max initial number of streams. If unset or zero, the server will + * provide a value of streams so as to produce reasonable throughput. Must be non-negative. + * The number of streams may be lower than the requested number, depending on the amount + * parallelism that is reasonable for the table. Error will be returned if the max count is + * greater than the current system max limit of 1,000. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -198,14 +176,14 @@ public final ReadSession createReadSession( ProjectName parent, ReadSession readSession, int maxStreamCount) { CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) + .setParent(Objects.isNull(parent) ? null : parent.toString()) .setReadSession(readSession) .setMaxStreamCount(maxStreamCount) .build(); return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -224,25 +202,14 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   int maxStreamCount = 0;
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent.toString(), readSession, maxStreamCount);
-   * }
-   * 
- * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. - * @param readSession Required. Session to be created. - * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide - * a value of streams so as to produce reasonable throughput. Must be non-negative. The number - * of streams may be lower than the requested number, depending on the amount parallelism that - * is reasonable for the table. Error will be returned if the max count is greater than the - * current system max limit of 1,000. + * @param read_session Required. Session to be created. + * @param max_stream_count Max initial number of streams. If unset or zero, the server will + * provide a value of streams so as to produce reasonable throughput. Must be non-negative. + * The number of streams may be lower than the requested number, depending on the amount + * parallelism that is reasonable for the table. Error will be returned if the max count is + * greater than the current system max limit of 1,000. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -257,7 +224,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -276,20 +243,6 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setReadSession(readSession)
-   *     .build();
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -297,7 +250,7 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -317,26 +270,12 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setReadSession(readSession)
-   *     .build();
-   *   ApiFuture<ReadSession> future = baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
-   *   // Do something
-   *   ReadSession response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Reads rows from the stream in the format prescribed by the ReadSession. Each response contains * one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to @@ -346,26 +285,12 @@ public final UnaryCallable createReadSess * stream. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName readStream = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
-   *     .setReadStream(readStream.toString())
-   *     .build();
-   *
-   *   ServerStream<ReadRowsResponse> stream = baseBigQueryReadClient.readRowsCallable().call(request);
-   *   for (ReadRowsResponse response : stream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -378,18 +303,6 @@ public final ServerStreamingCallable readRows * original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read * to completion. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -397,7 +310,7 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -411,18 +324,6 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ * to completion. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
-   *   // Do something
-   *   SplitReadStreamResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable splitReadStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java index 6570a55fc8..464224d2d1 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.ApiFunction; @@ -31,7 +32,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BaseBigQueryReadClient}. * @@ -49,23 +50,24 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BaseBigQueryReadSettings.newBuilder();
  * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         baseBigQueryReadSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BaseBigQueryReadSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BaseBigQueryReadSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createReadSession. */ public UnaryCallSettings createReadSessionSettings() { return ((BigQueryReadStubSettings) getStubSettings()).createReadSessionSettings(); @@ -142,18 +144,15 @@ protected BaseBigQueryReadSettings(Builder settingsBuilder) throws IOException { /** Builder for BaseBigQueryReadSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryReadStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryReadStubSettings.newBuilder()); - } - protected Builder(BaseBigQueryReadSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -162,11 +161,15 @@ protected Builder(BigQueryReadStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryReadStubSettings.newBuilder()); + } + public BigQueryReadStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryReadStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java index e902043f97..32515d9178 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -22,10 +23,11 @@ import com.google.cloud.bigquery.storage.v1beta2.stub.BigQueryWriteStub; import com.google.cloud.bigquery.storage.v1beta2.stub.BigQueryWriteStubSettings; import java.io.IOException; +import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery Write API. * @@ -34,17 +36,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- *   WriteStream writeStream = WriteStream.newBuilder().build();
- *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the bigQueryWriteClient object to clean up resources such + *

Note: close() needs to be called on the BigQueryWriteClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * *

The surface of this class includes several types of Java methods for each of the API's @@ -72,30 +64,26 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
- * BigQueryWriteClient bigQueryWriteClient =
- *     BigQueryWriteClient.create(bigQueryWriteSettings);
- * 
- * 
+ * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings); + * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder().setEndpoint(myEndpoint).build();
- * BigQueryWriteClient bigQueryWriteClient =
- *     BigQueryWriteClient.create(bigQueryWriteSettings);
- * 
- * 
+ * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings); + * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BigQueryWriteClient implements BackgroundResource { private final BigQueryWriteSettings settings; private final BigQueryWriteStub stub; @@ -116,7 +104,7 @@ public static final BigQueryWriteClient create(BigQueryWriteSettings settings) /** * Constructs an instance of BigQueryWriteClient, using the given stub for making calls. This is - * for advanced usage - prefer to use BigQueryWriteSettings}. + * for advanced usage - prefer using create(BigQueryWriteSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BigQueryWriteClient create(BigQueryWriteStub stub) { @@ -148,7 +136,7 @@ public BigQueryWriteStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -156,31 +144,21 @@ public BigQueryWriteStub getStub() { * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
-   * }
-   * 
- * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param writeStream Required. Stream to be created. + * @param write_stream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final WriteStream createWriteStream(TableName parent, WriteStream writeStream) { CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder() - .setParent(parent == null ? null : parent.toString()) + .setParent(Objects.isNull(parent) ? null : parent.toString()) .setWriteStream(writeStream) .build(); return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -188,19 +166,9 @@ public final WriteStream createWriteStream(TableName parent, WriteStream writeSt * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent.toString(), writeStream);
-   * }
-   * 
- * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param writeStream Required. Stream to be created. + * @param write_stream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final WriteStream createWriteStream(String parent, WriteStream writeStream) { @@ -209,7 +177,7 @@ public final WriteStream createWriteStream(String parent, WriteStream writeStrea return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -217,20 +185,6 @@ public final WriteStream createWriteStream(String parent, WriteStream writeStrea * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setWriteStream(writeStream)
-   *     .build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -238,7 +192,7 @@ public final WriteStream createWriteStream(CreateWriteStreamRequest request) { return createWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -247,26 +201,12 @@ public final WriteStream createWriteStream(CreateWriteStreamRequest request) { * received. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setWriteStream(writeStream)
-   *     .build();
-   *   ApiFuture<WriteStream> future = bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   WriteStream response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createWriteStreamCallable() { return stub.createWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Appends data to the given stream. * @@ -286,63 +226,31 @@ public final UnaryCallable createWriteStr * the stream is committed. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
-   *       bigQueryWriteClient.appendRowsCallable().call();
-   *
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   AppendRowsRequest request = AppendRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   bidiStream.send(request);
-   *   for (AppendRowsResponse response : bidiStream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ public final BidiStreamingCallable appendRowsCallable() { return stub.appendRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(name);
-   * }
-   * 
- * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final WriteStream getWriteStream(WriteStreamName name) { GetWriteStreamRequest request = - GetWriteStreamRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + GetWriteStreamRequest.newBuilder() + .setName(Objects.isNull(name) ? null : name.toString()) + .build(); return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(name.toString());
-   * }
-   * 
- * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -352,22 +260,10 @@ public final WriteStream getWriteStream(String name) { return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -375,42 +271,21 @@ public final WriteStream getWriteStream(GetWriteStreamRequest request) { return getWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<WriteStream> future = bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   WriteStream response = future.get();
-   * }
-   * 
*/ public final UnaryCallable getWriteStreamCallable() { return stub.getWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
-   * }
-   * 
- * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -418,25 +293,16 @@ public final UnaryCallable getWriteStreamCal public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder() - .setName(name == null ? null : name.toString()) + .setName(Objects.isNull(name) ? null : name.toString()) .build(); return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name.toString());
-   * }
-   * 
- * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -447,23 +313,11 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(String name) { return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -471,45 +325,24 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStream return finalizeWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<FinalizeWriteStreamResponse> future = bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   FinalizeWriteStreamResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable finalizeWriteStreamCallable() { return stub.finalizeWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   String parent = "";
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent);
-   * }
-   * 
- * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -520,26 +353,12 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(String pare return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   String parent = "";
-   *   List<String> writeStreams = new ArrayList<>();
-   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
-   *     .setParent(parent)
-   *     .addAllWriteStreams(writeStreams)
-   *     .build();
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -548,77 +367,45 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams( return batchCommitWriteStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   String parent = "";
-   *   List<String> writeStreams = new ArrayList<>();
-   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
-   *     .setParent(parent)
-   *     .addAllWriteStreams(writeStreams)
-   *     .build();
-   *   ApiFuture<BatchCommitWriteStreamsResponse> future = bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
-   *   // Do something
-   *   BatchCommitWriteStreamsResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable batchCommitWriteStreamsCallable() { return stub.batchCommitWriteStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
-   * }
-   * 
- * - * @param writeStream Required. The stream that is the target of the flush operation. + * @param write_stream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final FlushRowsResponse flushRows(WriteStreamName writeStream) { FlushRowsRequest request = FlushRowsRequest.newBuilder() - .setWriteStream(writeStream == null ? null : writeStream.toString()) + .setWriteStream(Objects.isNull(writeStream) ? null : writeStream.toString()) .build(); return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream.toString());
-   * }
-   * 
- * - * @param writeStream Required. The stream that is the target of the flush operation. + * @param write_stream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final FlushRowsResponse flushRows(String writeStream) { @@ -626,25 +413,13 @@ public final FlushRowsResponse flushRows(String writeStream) { return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -652,7 +427,7 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) { return flushRowsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation @@ -660,18 +435,6 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) { * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   ApiFuture<FlushRowsResponse> future = bigQueryWriteClient.flushRowsCallable().futureCall(request);
-   *   // Do something
-   *   FlushRowsResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable flushRowsCallable() { return stub.flushRowsCallable(); diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java index 154534dbdc..3ec1da642e 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.ApiFunction; @@ -31,7 +32,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryWriteClient}. * @@ -49,23 +50,23 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

- * 
- * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder =
- *     BigQueryWriteSettings.newBuilder();
+ * 
{@code
+ * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder = BigQueryWriteSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
+ *         bigQueryWriteSettingsBuilder
+ *             .createWriteStreamSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BigQueryWriteSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createWriteStream. */ public UnaryCallSettings createWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).createWriteStreamSettings(); @@ -158,18 +159,15 @@ protected BigQueryWriteSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryWriteSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryWriteStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryWriteStubSettings.newBuilder()); - } - protected Builder(BigQueryWriteSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -178,11 +176,15 @@ protected Builder(BigQueryWriteStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryWriteStubSettings.newBuilder()); + } + public BigQueryWriteStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryWriteStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java index b53d96890d..da92a24b0c 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,9 @@ */ /** - * A client to BigQuery Storage API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

====================== BaseBigQueryReadClient ====================== + *

======================= BigQueryReadClient ======================= * *

Service Description: BigQuery Read API. * @@ -28,38 +26,17 @@ *

New code should use the v1 Read API going forward, if they don't use Write API at the same * time. * - *

Sample for BaseBigQueryReadClient: - * - *

- * 
- * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   ReadSession readSession = ReadSession.newBuilder().build();
- *   int maxStreamCount = 0;
- *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
- * }
- * 
- * 
+ *

Sample for BigQueryReadClient: * - * =================== BigQueryWriteClient =================== + *

======================= BigQueryWriteClient ======================= * *

Service Description: BigQuery Write API. * *

The Write API can be used to write data to BigQuery. * *

Sample for BigQueryWriteClient: - * - *

- * 
- * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- *   WriteStream writeStream = WriteStream.newBuilder().build();
- *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
- * }
- * 
- * 
*/ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.bigquery.storage.v1beta2; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java index 116f00c54f..ee9bfd6aba 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -27,14 +27,13 @@ import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. + * Base stub class for the BigQueryRead service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryReadStub implements BackgroundResource { public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java index 1e9f940a35..bb79df916a 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; import com.google.api.core.ApiFunction; @@ -46,7 +47,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryReadStub}. * @@ -64,28 +65,28 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
- * BigQueryReadStubSettings.Builder baseBigQueryReadSettingsBuilder =
+ * 
{@code
+ * BigQueryReadStubSettings.Builder bigQueryReadSettingsBuilder =
  *     BigQueryReadStubSettings.newBuilder();
- * baseBigQueryReadSettingsBuilder
+ * bigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         bigQueryReadSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryReadStubSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * 
- * 
+ * BigQueryReadStubSettings bigQueryReadSettings = bigQueryReadSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryReadStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = ImmutableList.builder() .add("https://www.googleapis.com/auth/bigquery") - .add("https://www.googleapis.com/auth/bigquery.insertdata") .add("https://www.googleapis.com/auth/bigquery.readonly") .add("https://www.googleapis.com/auth/cloud-platform") .build(); @@ -117,10 +118,10 @@ public BigQueryReadStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryReadStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -188,14 +189,12 @@ protected BigQueryReadStubSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryReadStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createReadSessionSettings; private final ServerStreamingCallSettings.Builder readRowsSettings; private final UnaryCallSettings.Builder splitReadStreamSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -203,36 +202,18 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_4_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.UNAVAILABLE, - StatusCode.Code.RESOURCE_EXHAUSTED))); - definitions.put( - "retry_policy_6_codes", + "retry_policy_0_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_3_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", + "retry_policy_1_codes", ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( - "retry_policy_5_codes", + "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -251,29 +232,7 @@ public static class Builder extends StubSettings.Builder>of( createReadSessionSettings, splitReadStreamSettings); - initDefaults(this); } + protected Builder(BigQueryReadStubSettings settings) { + super(settings); + + createReadSessionSettings = 
settings.createReadSessionSettings.toBuilder(); + readRowsSettings = settings.readRowsSettings.toBuilder(); + splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createReadSessionSettings, splitReadStreamSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .readRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .splitReadStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); return builder; } - protected Builder(BigQueryReadStubSettings settings) { - super(settings); - - createReadSessionSettings = 
settings.createReadSessionSettings.toBuilder(); - readRowsSettings = settings.readRowsSettings.toBuilder(); - splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createReadSessionSettings, splitReadStreamSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java index cc569eeb24..cedc3d4d33 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -32,14 +32,13 @@ import com.google.cloud.bigquery.storage.v1beta2.WriteStream; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. 
+ * Base stub class for the BigQueryWrite service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryWriteStub implements BackgroundResource { public UnaryCallable createWriteStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java index 4e945907d9..ebe08eda20 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; import com.google.api.core.ApiFunction; @@ -51,7 +52,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryWriteStub}. * @@ -69,29 +70,29 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

- * 
+ * 
{@code
  * BigQueryWriteStubSettings.Builder bigQueryWriteSettingsBuilder =
  *     BigQueryWriteStubSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
+ *         bigQueryWriteSettingsBuilder
+ *             .createWriteStreamSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteStubSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryWriteStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = ImmutableList.builder() .add("https://www.googleapis.com/auth/bigquery") .add("https://www.googleapis.com/auth/bigquery.insertdata") - .add("https://www.googleapis.com/auth/bigquery.readonly") .add("https://www.googleapis.com/auth/cloud-platform") .build(); @@ -142,10 +143,10 @@ public BigQueryWriteStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryWriteStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -216,7 +217,6 @@ protected BigQueryWriteStubSettings(Builder settingsBuilder) throws IOException /** Builder for BigQueryWriteStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createWriteStreamSettings; private final StreamingCallSettings.Builder @@ -229,7 +229,6 @@ public static class Builder extends StubSettings.Builder batchCommitWriteStreamsSettings; private final UnaryCallSettings.Builder flushRowsSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -237,36 +236,22 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_4_codes", + "retry_policy_3_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); definitions.put( - "retry_policy_6_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_3_codes", + "retry_policy_4_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); definitions.put( "retry_policy_5_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -275,28 +260,6 @@ public static class Builder extends StubSettings.Builder definitions = ImmutableMap.builder(); RetrySettings settings = null; - settings = - 
RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_1_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_6_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.ofMillis(100L)) @@ -318,7 +281,7 @@ public static class Builder extends StubSettings.Builder>of( + createWriteStreamSettings, + getWriteStreamSettings, + finalizeWriteStreamSettings, + batchCommitWriteStreamsSettings, + flushRowsSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_4_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_4_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); 
builder .getWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .finalizeWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .batchCommitWriteStreamsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .flushRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); return builder; } - protected Builder(BigQueryWriteStubSettings settings) { - super(settings); - - createWriteStreamSettings = settings.createWriteStreamSettings.toBuilder(); - appendRowsSettings = settings.appendRowsSettings.toBuilder(); - getWriteStreamSettings = settings.getWriteStreamSettings.toBuilder(); - finalizeWriteStreamSettings = settings.finalizeWriteStreamSettings.toBuilder(); - batchCommitWriteStreamsSettings = settings.batchCommitWriteStreamsSettings.toBuilder(); - flushRowsSettings = settings.flushRowsSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createWriteStreamSettings, - 
getWriteStreamSettings, - finalizeWriteStreamSettings, - batchCommitWriteStreamsSettings, - flushRowsSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java index a66a898f09..85844b1ed9 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryRead service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryReadCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java index 13f589f0af..158fe6200c 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -31,6 +31,7 @@ import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest; import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -38,16 +39,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryRead service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryReadStub extends BigQueryReadStub { - private static final MethodDescriptor createReadSessionMethodDescriptor = MethodDescriptor.newBuilder() @@ -58,6 +57,7 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) .build(); + private static final MethodDescriptor readRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -66,6 +66,7 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor splitReadStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -78,13 +79,13 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final UnaryCallable createReadSessionCallable; private final ServerStreamingCallable readRowsCallable; private final UnaryCallable splitReadStreamCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryReadStub create(BigQueryReadStubSettings settings) @@ -102,27 +103,18 @@ public static final GrpcBigQueryReadStub create( BigQueryReadStubSettings.newBuilder().build(), clientContext, callableFactory); } - /** - * Constructs an instance of GrpcBigQueryReadStub, using the given settings. 
This is protected so - * that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. - */ protected GrpcBigQueryReadStub(BigQueryReadStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryReadCallableFactory()); } - /** - * Constructs an instance of GrpcBigQueryReadStub, using the given settings. This is protected so - * that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. - */ protected GrpcBigQueryReadStub( BigQueryReadStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createReadSessionTransportSettings = GrpcCallSettings.newBuilder() @@ -178,7 +170,12 @@ public Map extract(SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java index 985997ff97..0831c1c84e 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java +++ 
b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryWrite service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryWriteCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java index 262b7557f9..c1fd66050f 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -36,6 +36,7 @@ import com.google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest; import com.google.cloud.bigquery.storage.v1beta2.WriteStream; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -43,16 +44,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryWrite service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryWriteStub extends BigQueryWriteStub { - private static final MethodDescriptor createWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -63,6 +62,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { ProtoUtils.marshaller(CreateWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) .build(); + private static final MethodDescriptor appendRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -71,6 +71,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setRequestMarshaller(ProtoUtils.marshaller(AppendRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AppendRowsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor getWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -81,6 +82,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { ProtoUtils.marshaller(GetWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) .build(); + private static final MethodDescriptor finalizeWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -92,6 +94,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller( ProtoUtils.marshaller(FinalizeWriteStreamResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor< BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> batchCommitWriteStreamsMethodDescriptor = @@ -105,6 +108,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller( ProtoUtils.marshaller(BatchCommitWriteStreamsResponse.getDefaultInstance())) .build(); + 
private static final MethodDescriptor flushRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -114,8 +118,6 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller(ProtoUtils.marshaller(FlushRowsResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final UnaryCallable createWriteStreamCallable; private final BidiStreamingCallable appendRowsCallable; private final UnaryCallable getWriteStreamCallable; @@ -125,6 +127,8 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { batchCommitWriteStreamsCallable; private final UnaryCallable flushRowsCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryWriteStub create(BigQueryWriteStubSettings settings) @@ -142,27 +146,18 @@ public static final GrpcBigQueryWriteStub create( BigQueryWriteStubSettings.newBuilder().build(), clientContext, callableFactory); } - /** - * Constructs an instance of GrpcBigQueryWriteStub, using the given settings. This is protected so - * that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. - */ protected GrpcBigQueryWriteStub(BigQueryWriteStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryWriteCallableFactory()); } - /** - * Constructs an instance of GrpcBigQueryWriteStub, using the given settings. This is protected so - * that it is easy to make a subclass, but otherwise, the static factory methods should be - * preferred. 
- */ protected GrpcBigQueryWriteStub( BigQueryWriteStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createWriteStreamTransportSettings = GrpcCallSettings.newBuilder() @@ -180,6 +175,15 @@ public Map extract(CreateWriteStreamRequest request) { GrpcCallSettings appendRowsTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(appendRowsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(AppendRowsRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("write_stream", String.valueOf(request.getWriteStream())); + return params.build(); + } + }) .build(); GrpcCallSettings getWriteStreamTransportSettings = GrpcCallSettings.newBuilder() @@ -262,7 +266,12 @@ public Map extract(FlushRowsRequest request) { callableFactory.createUnaryCallable( flushRowsTransportSettings, settings.flushRowsSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } public UnaryCallable createWriteStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java index 1217dca250..647e921610 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java @@ -5,7 +5,7 @@ * you may 
not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1; import com.google.api.gax.core.NoCredentialsProvider; @@ -26,13 +27,15 @@ import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import io.grpc.Status; +import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -40,31 +43,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BaseBigQueryReadClientTest { private static MockBigQueryRead mockBigQueryRead; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private BaseBigQueryReadClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryRead = new MockBigQueryRead(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryRead)); - serviceHelper.start(); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - 
serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BaseBigQueryReadSettings settings = BaseBigQueryReadSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -79,12 +82,14 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createReadSessionTest() { - ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); - TableName table = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); + public void createReadSessionTest() throws Exception { ReadSession expectedResponse = - ReadSession.newBuilder().setName(name.toString()).setTable(table.toString()).build(); + ReadSession.newBuilder() + .setName("name3373707") + .setExpireTime(Timestamp.newBuilder().build()) + .setTable("table110115790") + .addAllStreams(new ArrayList()) + .build(); mockBigQueryRead.addResponse(expectedResponse); ProjectName parent = ProjectName.of("[PROJECT]"); @@ -96,9 +101,9 @@ public void createReadSessionTest() { List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); + CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); - Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(readSession, actualRequest.getReadSession()); Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); Assert.assertTrue( @@ -108,33 +113,83 @@ public void createReadSessionTest() { } @Test - @SuppressWarnings("all") public void createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { ProjectName parent = ProjectName.of("[PROJECT]"); ReadSession readSession = ReadSession.newBuilder().build(); int maxStreamCount = 940837515; + client.createReadSession(parent, readSession, maxStreamCount); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createReadSessionTest2() throws Exception { + ReadSession expectedResponse = + ReadSession.newBuilder() + .setName("name3373707") + .setExpireTime(Timestamp.newBuilder().build()) + .setTable("table110115790") + .addAllStreams(new ArrayList()) + .build(); + mockBigQueryRead.addResponse(expectedResponse); + + String parent = "parent-995424086"; + ReadSession readSession = ReadSession.newBuilder().build(); + int maxStreamCount = 940837515; + + ReadSession actualResponse = client.createReadSession(parent, readSession, maxStreamCount); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryRead.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(readSession, actualRequest.getReadSession()); + Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createReadSessionExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryRead.addException(exception); + + try { + String parent = "parent-995424086"; + ReadSession readSession = ReadSession.newBuilder().build(); + int maxStreamCount = 940837515; 
client.createReadSession(parent, readSession, maxStreamCount); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") public void readRowsTest() throws Exception { - long rowCount = 1340416618L; - ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); + ReadRowsResponse expectedResponse = + ReadRowsResponse.newBuilder() + .setRowCount(1340416618) + .setStats(StreamStats.newBuilder().build()) + .setThrottleState(ThrottleState.newBuilder().build()) + .build(); mockBigQueryRead.addResponse(expectedResponse); - ReadStreamName readStream = - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); + ReadRowsRequest.newBuilder() + .setReadStream( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setOffset(-1019779949) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -147,14 +202,15 @@ public void readRowsTest() throws Exception { } @Test - @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); - ReadStreamName readStream = - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); + ReadRowsRequest.newBuilder() + .setReadStream( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setOffset(-1019779949) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -166,29 +222,36 @@ public void readRowsExceptionTest() throws 
Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void splitReadStreamTest() { - SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); + public void splitReadStreamTest() throws Exception { + SplitReadStreamResponse expectedResponse = + SplitReadStreamResponse.newBuilder() + .setPrimaryStream(ReadStream.newBuilder().build()) + .setRemainderStream(ReadStream.newBuilder().build()) + .build(); mockBigQueryRead.addResponse(expectedResponse); - ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); + SplitReadStreamRequest.newBuilder() + .setName( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setFraction(-1653751294) + .build(); SplitReadStreamResponse actualResponse = client.splitReadStream(request); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); + SplitReadStreamRequest actualRequest = ((SplitReadStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, ReadStreamName.parse(actualRequest.getName())); + Assert.assertEquals(request.getName(), actualRequest.getName()); + Assert.assertEquals(request.getFraction(), actualRequest.getFraction(), 0.0001); Assert.assertTrue( channelProvider.isHeaderSent( 
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -196,20 +259,21 @@ public void splitReadStreamTest() { } @Test - @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { - ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); - + SplitReadStreamRequest.newBuilder() + .setName( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setFraction(-1653751294) + .build(); client.splitReadStream(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java index 6c578b0d17..d4972d28a7 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryRead implements MockGrpcService { private final MockBigQueryReadImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java index b6e022ac6f..21e64df693 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -23,9 +24,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryReadImpl extends BigQueryReadImplBase { private List requests; private Queue responses; @@ -62,10 +64,10 @@ public void createReadSession( Object response = responses.remove(); if (response instanceof ReadSession) { requests.add(request); - responseObserver.onNext((ReadSession) response); + responseObserver.onNext(((ReadSession) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -76,10 +78,10 @@ public void readRows(ReadRowsRequest request, StreamObserver r Object response = responses.remove(); if (response instanceof ReadRowsResponse) { requests.add(request); - responseObserver.onNext((ReadRowsResponse) response); + responseObserver.onNext(((ReadRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -91,10 +93,10 @@ public void splitReadStream( Object response = responses.remove(); if (response instanceof SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext((SplitReadStreamResponse) response); + responseObserver.onNext(((SplitReadStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } 
else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java index f035c493f5..9698758a03 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.gax.core.NoCredentialsProvider; @@ -26,25 +27,15 @@ import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.protobuf.AbstractMessage; -import io.grpc.Status; +import com.google.protobuf.Int64Value; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -52,31 +43,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BigQueryWriteClientTest { - private static MockBigQueryWrite mockBigQueryWrite; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private BigQueryWriteClient client; private 
LocalChannelProvider channelProvider; + private static MockBigQueryWrite mockBigQueryWrite; @BeforeClass public static void startStaticServer() { mockBigQueryWrite = new MockBigQueryWrite(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryWrite)); - serviceHelper.start(); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BigQueryWriteSettings settings = BigQueryWriteSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -91,25 +82,22 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createWriteStreamTest() { - WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - String externalId = "externalId-1153075697"; - WriteStream expectedResponse = - WriteStream.newBuilder().setName(name.toString()).setExternalId(externalId).build(); + public void createWriteStreamTest() throws Exception { + Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - WriteStream writeStream = WriteStream.newBuilder().build(); + Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); - WriteStream actualResponse = client.createWriteStream(parent, writeStream); + Stream.WriteStream actualResponse = client.createWriteStream(parent, writeStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateWriteStreamRequest actualRequest = (CreateWriteStreamRequest) 
actualRequests.get(0); + Storage.CreateWriteStreamRequest actualRequest = + ((Storage.CreateWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(writeStream, actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -118,96 +106,133 @@ public void createWriteStreamTest() { } @Test - @SuppressWarnings("all") public void createWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - WriteStream writeStream = WriteStream.newBuilder().build(); + Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); + client.createWriteStream(parent, writeStream); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + @Test + public void createWriteStreamTest2() throws Exception { + Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String parent = "parent-995424086"; + Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); + + Stream.WriteStream actualResponse = client.createWriteStream(parent, writeStream); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.CreateWriteStreamRequest actualRequest = + ((Storage.CreateWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(writeStream, actualRequest.getWriteStream()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String parent = "parent-995424086"; + Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); client.createWriteStream(parent, writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void appendRowsTest() throws Exception { - long offset = 1019779949L; - AppendRowsResponse expectedResponse = AppendRowsResponse.newBuilder().setOffset(offset).build(); + Storage.AppendRowsResponse expectedResponse = Storage.AppendRowsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); - WriteStreamName writeStream = - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - AppendRowsRequest request = - AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); + Storage.AppendRowsRequest request = + Storage.AppendRowsRequest.newBuilder() + .setWriteStream( + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setOffset(Int64Value.newBuilder().build()) + .setIgnoreUnknownFields(true) + .build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - BidiStreamingCallable callable = + BidiStreamingCallable callable = client.appendRowsCallable(); - ApiStreamObserver requestObserver = + ApiStreamObserver requestObserver = callable.bidiStreamingCall(responseObserver); requestObserver.onNext(request); requestObserver.onCompleted(); - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.assertEquals(1, actualResponses.size()); Assert.assertEquals(expectedResponse, actualResponses.get(0)); } @Test - @SuppressWarnings("all") public void appendRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); - WriteStreamName writeStream = - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - AppendRowsRequest request = - 
AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); + Storage.AppendRowsRequest request = + Storage.AppendRowsRequest.newBuilder() + .setWriteStream( + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setOffset(Int64Value.newBuilder().build()) + .setIgnoreUnknownFields(true) + .build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - BidiStreamingCallable callable = + BidiStreamingCallable callable = client.appendRowsCallable(); - ApiStreamObserver requestObserver = + ApiStreamObserver requestObserver = callable.bidiStreamingCall(responseObserver); requestObserver.onNext(request); try { - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void getWriteStreamTest() { - WriteStreamName name2 = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - String externalId = "externalId-1153075697"; - WriteStream expectedResponse = - WriteStream.newBuilder().setName(name2.toString()).setExternalId(externalId).build(); + public void getWriteStreamTest() throws Exception { + Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - WriteStream actualResponse = client.getWriteStream(name); + Stream.WriteStream actualResponse = 
client.getWriteStream(name); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetWriteStreamRequest actualRequest = (GetWriteStreamRequest) actualRequests.get(0); + Storage.GetWriteStreamRequest actualRequest = + ((Storage.GetWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -215,39 +240,72 @@ public void getWriteStreamTest() { } @Test - @SuppressWarnings("all") public void getWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.getWriteStream(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + @Test + public void getWriteStreamTest2() throws Exception { + Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String name = "name3373707"; + + Stream.WriteStream actualResponse = client.getWriteStream(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.GetWriteStreamRequest actualRequest = + ((Storage.GetWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String name = "name3373707"; client.getWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void finalizeWriteStreamTest() { - long rowCount = 1340416618L; - FinalizeWriteStreamResponse expectedResponse = - FinalizeWriteStreamResponse.newBuilder().setRowCount(rowCount).build(); + public void finalizeWriteStreamTest() throws Exception { + Storage.FinalizeWriteStreamResponse expectedResponse = + Storage.FinalizeWriteStreamResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); + Storage.FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FinalizeWriteStreamRequest actualRequest = (FinalizeWriteStreamRequest) actualRequests.get(0); + Storage.FinalizeWriteStreamRequest actualRequest = + ((Storage.FinalizeWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -255,39 +313,73 @@ public void finalizeWriteStreamTest() { } @Test - @SuppressWarnings("all") public void finalizeWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.finalizeWriteStream(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void finalizeWriteStreamTest2() throws Exception { + Storage.FinalizeWriteStreamResponse expectedResponse = + Storage.FinalizeWriteStreamResponse.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String name = "name3373707"; + Storage.FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.FinalizeWriteStreamRequest actualRequest = + ((Storage.FinalizeWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void finalizeWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String name = "name3373707"; client.finalizeWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void batchCommitWriteStreamsTest() { - BatchCommitWriteStreamsResponse expectedResponse = - BatchCommitWriteStreamsResponse.newBuilder().build(); + public void batchCommitWriteStreamsTest() throws Exception { + Storage.BatchCommitWriteStreamsResponse expectedResponse = + Storage.BatchCommitWriteStreamsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); + Storage.BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - BatchCommitWriteStreamsRequest actualRequest = - (BatchCommitWriteStreamsRequest) actualRequests.get(0); + Storage.BatchCommitWriteStreamsRequest actualRequest = + ((Storage.BatchCommitWriteStreamsRequest) actualRequests.get(0)); - Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -295,39 +387,72 @@ public void batchCommitWriteStreamsTest() { } @Test - @SuppressWarnings("all") public void batchCommitWriteStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); + client.batchCommitWriteStreams(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void batchCommitWriteStreamsTest2() throws Exception { + Storage.BatchCommitWriteStreamsResponse expectedResponse = + Storage.BatchCommitWriteStreamsResponse.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + Storage.BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.BatchCommitWriteStreamsRequest actualRequest = + ((Storage.BatchCommitWriteStreamsRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + @Test + public void batchCommitWriteStreamsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String parent = "parent-995424086"; client.batchCommitWriteStreams(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void flushRowsTest() { - long offset = 1019779949L; - FlushRowsResponse expectedResponse = FlushRowsResponse.newBuilder().setOffset(offset).build(); + public void flushRowsTest() throws Exception { + Storage.FlushRowsResponse expectedResponse = Storage.FlushRowsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - FlushRowsResponse actualResponse = client.flushRows(writeStream); + Storage.FlushRowsResponse actualResponse = client.flushRows(writeStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FlushRowsRequest actualRequest = (FlushRowsRequest) actualRequests.get(0); + Storage.FlushRowsRequest actualRequest = ((Storage.FlushRowsRequest) actualRequests.get(0)); - Assert.assertEquals(writeStream, WriteStreamName.parse(actualRequest.getWriteStream())); + Assert.assertEquals(writeStream.toString(), actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -335,19 +460,52 @@ public void flushRowsTest() { } @Test - @SuppressWarnings("all") public void flushRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.flushRows(writeStream); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void flushRowsTest2() throws Exception { + Storage.FlushRowsResponse expectedResponse = Storage.FlushRowsResponse.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String writeStream = "write_stream-1431753760"; + Storage.FlushRowsResponse actualResponse = client.flushRows(writeStream); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.FlushRowsRequest actualRequest = ((Storage.FlushRowsRequest) actualRequests.get(0)); + + Assert.assertEquals(writeStream, actualRequest.getWriteStream()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void flushRowsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String writeStream = "write_stream-1431753760"; client.flushRows(writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java index 543996d5e5..14652dc6d0 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryWrite implements MockGrpcService { private final MockBigQueryWriteImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java index ecc8e99e05..e63712321a 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,30 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; import com.google.cloud.bigquery.storage.v1alpha2.BigQueryWriteGrpc.BigQueryWriteImplBase; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.protobuf.AbstractMessage; import io.grpc.stub.StreamObserver; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryWriteImpl extends BigQueryWriteImplBase { private List requests; private Queue responses; @@ -69,32 +60,33 @@ public void reset() { @Override public void createWriteStream( - CreateWriteStreamRequest request, StreamObserver responseObserver) { + Storage.CreateWriteStreamRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof WriteStream) { + if (response instanceof Stream.WriteStream) { requests.add(request); - responseObserver.onNext((WriteStream) response); + responseObserver.onNext(((Stream.WriteStream) 
response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } } @Override - public StreamObserver appendRows( - final StreamObserver responseObserver) { - StreamObserver requestObserver = - new StreamObserver() { + public StreamObserver appendRows( + final StreamObserver responseObserver) { + StreamObserver requestObserver = + new StreamObserver() { @Override - public void onNext(AppendRowsRequest value) { + public void onNext(Storage.AppendRowsRequest value) { requests.add(value); final Object response = responses.remove(); - if (response instanceof AppendRowsResponse) { - responseObserver.onNext((AppendRowsResponse) response); + if (response instanceof Storage.AppendRowsResponse) { + responseObserver.onNext(((Storage.AppendRowsResponse) response)); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -115,14 +107,14 @@ public void onCompleted() { @Override public void getWriteStream( - GetWriteStreamRequest request, StreamObserver responseObserver) { + Storage.GetWriteStreamRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof WriteStream) { + if (response instanceof Stream.WriteStream) { requests.add(request); - responseObserver.onNext((WriteStream) response); + responseObserver.onNext(((Stream.WriteStream) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ 
-130,15 +122,15 @@ public void getWriteStream( @Override public void finalizeWriteStream( - FinalizeWriteStreamRequest request, - StreamObserver responseObserver) { + Storage.FinalizeWriteStreamRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof FinalizeWriteStreamResponse) { + if (response instanceof Storage.FinalizeWriteStreamResponse) { requests.add(request); - responseObserver.onNext((FinalizeWriteStreamResponse) response); + responseObserver.onNext(((Storage.FinalizeWriteStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -146,15 +138,15 @@ public void finalizeWriteStream( @Override public void batchCommitWriteStreams( - BatchCommitWriteStreamsRequest request, - StreamObserver responseObserver) { + Storage.BatchCommitWriteStreamsRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof BatchCommitWriteStreamsResponse) { + if (response instanceof Storage.BatchCommitWriteStreamsResponse) { requests.add(request); - responseObserver.onNext((BatchCommitWriteStreamsResponse) response); + responseObserver.onNext(((Storage.BatchCommitWriteStreamsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -162,14 +154,15 @@ public void batchCommitWriteStreams( @Override public void flushRows( - FlushRowsRequest request, StreamObserver responseObserver) { + Storage.FlushRowsRequest request, + StreamObserver responseObserver) { Object response = 
responses.remove(); - if (response instanceof FlushRowsResponse) { + if (response instanceof Storage.FlushRowsResponse) { requests.add(request); - responseObserver.onNext((FlushRowsResponse) response); + responseObserver.onNext(((Storage.FlushRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java index 7d3c752e11..f0663837ef 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.gax.core.NoCredentialsProvider; @@ -25,27 +26,15 @@ import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.Stream; -import com.google.cloud.bigquery.storage.v1beta1.Storage.StreamPosition; -import com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; -import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -53,31 +42,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BaseBigQueryStorageClientTest { private static MockBigQueryStorage mockBigQueryStorage; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper 
mockServiceHelper; private BaseBigQueryStorageClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryStorage = new MockBigQueryStorage(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryStorage)); - serviceHelper.start(); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BaseBigQueryStorageSettings settings = BaseBigQueryStorageSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -92,25 +81,26 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createReadSessionTest() { - ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); - ReadSession expectedResponse = ReadSession.newBuilder().setName(name.toString()).build(); + public void createReadSessionTest() throws Exception { + Storage.ReadSession expectedResponse = Storage.ReadSession.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - TableReference tableReference = TableReference.newBuilder().build(); + TableReferenceProto.TableReference tableReference = + TableReferenceProto.TableReference.newBuilder().build(); ProjectName parent = ProjectName.of("[PROJECT]"); int requestedStreams = 1017221410; - ReadSession actualResponse = client.createReadSession(tableReference, parent, requestedStreams); + Storage.ReadSession actualResponse = + client.createReadSession(tableReference, parent, requestedStreams); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - 
CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); + Storage.CreateReadSessionRequest actualRequest = + ((Storage.CreateReadSessionRequest) actualRequests.get(0)); Assert.assertEquals(tableReference, actualRequest.getTableReference()); - Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -119,83 +109,123 @@ public void createReadSessionTest() { } @Test - @SuppressWarnings("all") public void createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - TableReference tableReference = TableReference.newBuilder().build(); + TableReferenceProto.TableReference tableReference = + TableReferenceProto.TableReference.newBuilder().build(); ProjectName parent = ProjectName.of("[PROJECT]"); int requestedStreams = 1017221410; + client.createReadSession(tableReference, parent, requestedStreams); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createReadSessionTest2() throws Exception { + Storage.ReadSession expectedResponse = Storage.ReadSession.newBuilder().build(); + mockBigQueryStorage.addResponse(expectedResponse); + + TableReferenceProto.TableReference tableReference = + TableReferenceProto.TableReference.newBuilder().build(); + String parent = "parent-995424086"; + int requestedStreams = 1017221410; + + Storage.ReadSession actualResponse = + client.createReadSession(tableReference, parent, requestedStreams); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryStorage.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.CreateReadSessionRequest actualRequest = + ((Storage.CreateReadSessionRequest) actualRequests.get(0)); + Assert.assertEquals(tableReference, actualRequest.getTableReference()); + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createReadSessionExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryStorage.addException(exception); + + try { + TableReferenceProto.TableReference tableReference = + TableReferenceProto.TableReference.newBuilder().build(); + String parent = "parent-995424086"; + int requestedStreams = 1017221410; client.createReadSession(tableReference, parent, requestedStreams); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void readRowsTest() throws Exception { - long rowCount = 1340416618L; - ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); + Storage.ReadRowsResponse expectedResponse = Storage.ReadRowsResponse.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - StreamPosition readPosition = StreamPosition.newBuilder().build(); - ReadRowsRequest request = ReadRowsRequest.newBuilder().setReadPosition(readPosition).build(); + Storage.ReadRowsRequest request = Storage.ReadRowsRequest.newBuilder().build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - ServerStreamingCallable callable = client.readRowsCallable(); + ServerStreamingCallable callable = + client.readRowsCallable(); callable.serverStreamingCall(request, responseObserver); - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.assertEquals(1, actualResponses.size()); Assert.assertEquals(expectedResponse, actualResponses.get(0)); } @Test - @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); - StreamPosition readPosition = StreamPosition.newBuilder().build(); - ReadRowsRequest request = ReadRowsRequest.newBuilder().setReadPosition(readPosition).build(); + Storage.ReadRowsRequest request = Storage.ReadRowsRequest.newBuilder().build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - ServerStreamingCallable callable = client.readRowsCallable(); + ServerStreamingCallable callable = + client.readRowsCallable(); 
callable.serverStreamingCall(request, responseObserver); try { - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void batchCreateReadSessionStreamsTest() { - BatchCreateReadSessionStreamsResponse expectedResponse = - BatchCreateReadSessionStreamsResponse.newBuilder().build(); + public void batchCreateReadSessionStreamsTest() throws Exception { + Storage.BatchCreateReadSessionStreamsResponse expectedResponse = + Storage.BatchCreateReadSessionStreamsResponse.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - ReadSession session = ReadSession.newBuilder().build(); + Storage.ReadSession session = Storage.ReadSession.newBuilder().build(); int requestedStreams = 1017221410; - BatchCreateReadSessionStreamsResponse actualResponse = + Storage.BatchCreateReadSessionStreamsResponse actualResponse = client.batchCreateReadSessionStreams(session, requestedStreams); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - BatchCreateReadSessionStreamsRequest actualRequest = - (BatchCreateReadSessionStreamsRequest) actualRequests.get(0); + Storage.BatchCreateReadSessionStreamsRequest actualRequest = + ((Storage.BatchCreateReadSessionStreamsRequest) actualRequests.get(0)); Assert.assertEquals(session, actualRequest.getSession()); Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); @@ -206,35 +236,33 @@ public void 
batchCreateReadSessionStreamsTest() { } @Test - @SuppressWarnings("all") public void batchCreateReadSessionStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - ReadSession session = ReadSession.newBuilder().build(); + Storage.ReadSession session = Storage.ReadSession.newBuilder().build(); int requestedStreams = 1017221410; - client.batchCreateReadSessionStreams(session, requestedStreams); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void finalizeStreamTest() { + public void finalizeStreamTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - Stream stream = Stream.newBuilder().build(); + Storage.Stream stream = Storage.Stream.newBuilder().build(); client.finalizeStream(stream); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FinalizeStreamRequest actualRequest = (FinalizeStreamRequest) actualRequests.get(0); + Storage.FinalizeStreamRequest actualRequest = + ((Storage.FinalizeStreamRequest) actualRequests.get(0)); Assert.assertEquals(stream, actualRequest.getStream()); Assert.assertTrue( @@ -244,35 +272,34 @@ public void finalizeStreamTest() { } @Test - @SuppressWarnings("all") public void finalizeStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - Stream stream = Stream.newBuilder().build(); - + Storage.Stream stream = Storage.Stream.newBuilder().build(); 
client.finalizeStream(stream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void splitReadStreamTest() { - SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); + public void splitReadStreamTest() throws Exception { + Storage.SplitReadStreamResponse expectedResponse = + Storage.SplitReadStreamResponse.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - Stream originalStream = Stream.newBuilder().build(); + Storage.Stream originalStream = Storage.Stream.newBuilder().build(); - SplitReadStreamResponse actualResponse = client.splitReadStream(originalStream); + Storage.SplitReadStreamResponse actualResponse = client.splitReadStream(originalStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); + Storage.SplitReadStreamRequest actualRequest = + ((Storage.SplitReadStreamRequest) actualRequests.get(0)); Assert.assertEquals(originalStream, actualRequest.getOriginalStream()); Assert.assertTrue( @@ -282,18 +309,16 @@ public void splitReadStreamTest() { } @Test - @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - Stream originalStream = Stream.newBuilder().build(); - + Storage.Stream originalStream = Storage.Stream.newBuilder().build(); client.splitReadStream(originalStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java index 6110c0f370..36e2257abe 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryStorage implements MockGrpcService { private final MockBigQueryStorageImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java index 41197eb3e1..79dc8f2ca2 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java @@ -5,7 +5,7 @@ * you may not 
use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,19 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; import com.google.cloud.bigquery.storage.v1beta1.BigQueryStorageGrpc.BigQueryStorageImplBase; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; import io.grpc.stub.StreamObserver; @@ -33,9 +25,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryStorageImpl extends BigQueryStorageImplBase { private List requests; private Queue responses; @@ -68,28 +61,30 @@ public void reset() { @Override public void createReadSession( - CreateReadSessionRequest request, StreamObserver responseObserver) { + 
Storage.CreateReadSessionRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof ReadSession) { + if (response instanceof Storage.ReadSession) { requests.add(request); - responseObserver.onNext((ReadSession) response); + responseObserver.onNext(((Storage.ReadSession) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } } @Override - public void readRows(ReadRowsRequest request, StreamObserver responseObserver) { + public void readRows( + Storage.ReadRowsRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof ReadRowsResponse) { + if (response instanceof Storage.ReadRowsResponse) { requests.add(request); - responseObserver.onNext((ReadRowsResponse) response); + responseObserver.onNext(((Storage.ReadRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -97,15 +92,15 @@ public void readRows(ReadRowsRequest request, StreamObserver r @Override public void batchCreateReadSessionStreams( - BatchCreateReadSessionStreamsRequest request, - StreamObserver responseObserver) { + Storage.BatchCreateReadSessionStreamsRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof BatchCreateReadSessionStreamsResponse) { + if (response instanceof Storage.BatchCreateReadSessionStreamsResponse) { requests.add(request); - responseObserver.onNext((BatchCreateReadSessionStreamsResponse) response); + 
responseObserver.onNext(((Storage.BatchCreateReadSessionStreamsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -113,14 +108,14 @@ public void batchCreateReadSessionStreams( @Override public void finalizeStream( - FinalizeStreamRequest request, StreamObserver responseObserver) { + Storage.FinalizeStreamRequest request, StreamObserver responseObserver) { Object response = responses.remove(); if (response instanceof Empty) { requests.add(request); - responseObserver.onNext((Empty) response); + responseObserver.onNext(((Empty) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -128,14 +123,15 @@ public void finalizeStream( @Override public void splitReadStream( - SplitReadStreamRequest request, StreamObserver responseObserver) { + Storage.SplitReadStreamRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof SplitReadStreamResponse) { + if (response instanceof Storage.SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext((SplitReadStreamResponse) response); + responseObserver.onNext(((Storage.SplitReadStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git 
a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java index 24966efa77..e144d138a7 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.gax.core.NoCredentialsProvider; @@ -26,13 +27,15 @@ import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import io.grpc.Status; +import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -40,34 +43,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BaseBigQueryReadClientTest { private static MockBigQueryRead mockBigQueryRead; - private static MockBigQueryWrite mockBigQueryWrite; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private BaseBigQueryReadClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryRead = new MockBigQueryRead(); - mockBigQueryWrite = new MockBigQueryWrite(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), - Arrays.asList(mockBigQueryRead, mockBigQueryWrite)); - serviceHelper.start(); + UUID.randomUUID().toString(), Arrays.asList(mockBigQueryRead)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BaseBigQueryReadSettings settings = BaseBigQueryReadSettings.newBuilder() 
.setTransportChannelProvider(channelProvider) @@ -82,12 +82,14 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createReadSessionTest() { - ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); - TableName table = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); + public void createReadSessionTest() throws Exception { ReadSession expectedResponse = - ReadSession.newBuilder().setName(name.toString()).setTable(table.toString()).build(); + ReadSession.newBuilder() + .setName("name3373707") + .setExpireTime(Timestamp.newBuilder().build()) + .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) + .addAllStreams(new ArrayList()) + .build(); mockBigQueryRead.addResponse(expectedResponse); ProjectName parent = ProjectName.of("[PROJECT]"); @@ -99,9 +101,9 @@ public void createReadSessionTest() { List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); + CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); - Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(readSession, actualRequest.getReadSession()); Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); Assert.assertTrue( @@ -111,33 +113,83 @@ public void createReadSessionTest() { } @Test - @SuppressWarnings("all") public void createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { ProjectName parent = ProjectName.of("[PROJECT]"); ReadSession readSession = ReadSession.newBuilder().build(); int 
maxStreamCount = 940837515; + client.createReadSession(parent, readSession, maxStreamCount); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createReadSessionTest2() throws Exception { + ReadSession expectedResponse = + ReadSession.newBuilder() + .setName("name3373707") + .setExpireTime(Timestamp.newBuilder().build()) + .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) + .addAllStreams(new ArrayList()) + .build(); + mockBigQueryRead.addResponse(expectedResponse); + + String parent = "parent-995424086"; + ReadSession readSession = ReadSession.newBuilder().build(); + int maxStreamCount = 940837515; + + ReadSession actualResponse = client.createReadSession(parent, readSession, maxStreamCount); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryRead.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(readSession, actualRequest.getReadSession()); + Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createReadSessionExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryRead.addException(exception); + + try { + String parent = "parent-995424086"; + ReadSession readSession = ReadSession.newBuilder().build(); + int maxStreamCount = 940837515; client.createReadSession(parent, readSession, maxStreamCount); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void readRowsTest() throws Exception { - long rowCount = 1340416618L; - ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); + ReadRowsResponse expectedResponse = + ReadRowsResponse.newBuilder() + .setRowCount(1340416618) + .setStats(StreamStats.newBuilder().build()) + .setThrottleState(ThrottleState.newBuilder().build()) + .build(); mockBigQueryRead.addResponse(expectedResponse); - ReadStreamName readStream = - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); + ReadRowsRequest.newBuilder() + .setReadStream( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setOffset(-1019779949) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -150,14 +202,15 @@ public void readRowsTest() throws Exception { } @Test - @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); - ReadStreamName readStream = - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); + ReadRowsRequest.newBuilder() + .setReadStream( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setOffset(-1019779949) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -169,29 +222,36 @@ public void readRowsExceptionTest() throws Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException 
apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void splitReadStreamTest() { - SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); + public void splitReadStreamTest() throws Exception { + SplitReadStreamResponse expectedResponse = + SplitReadStreamResponse.newBuilder() + .setPrimaryStream(ReadStream.newBuilder().build()) + .setRemainderStream(ReadStream.newBuilder().build()) + .build(); mockBigQueryRead.addResponse(expectedResponse); - ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); + SplitReadStreamRequest.newBuilder() + .setName( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setFraction(-1653751294) + .build(); SplitReadStreamResponse actualResponse = client.splitReadStream(request); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); + SplitReadStreamRequest actualRequest = ((SplitReadStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, ReadStreamName.parse(actualRequest.getName())); + Assert.assertEquals(request.getName(), actualRequest.getName()); + Assert.assertEquals(request.getFraction(), actualRequest.getFraction(), 0.0001); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -199,20 +259,21 @@ public void splitReadStreamTest() { } @Test - @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - 
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { - ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); - + SplitReadStreamRequest.newBuilder() + .setName( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setFraction(-1653751294) + .build(); client.splitReadStream(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java index f154e638c3..1a9c3ee14b 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.gax.core.NoCredentialsProvider; @@ -27,13 +28,15 @@ import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import io.grpc.Status; +import com.google.protobuf.Int64Value; +import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -41,34 +44,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BigQueryWriteClientTest { - private static MockBigQueryRead mockBigQueryRead; - private static MockBigQueryWrite mockBigQueryWrite; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private BigQueryWriteClient client; private LocalChannelProvider channelProvider; + private static MockBigQueryWrite mockBigQueryWrite; @BeforeClass public static void startStaticServer() { - mockBigQueryRead = new MockBigQueryRead(); mockBigQueryWrite = new MockBigQueryWrite(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), - Arrays.asList(mockBigQueryRead, mockBigQueryWrite)); - serviceHelper.start(); + UUID.randomUUID().toString(), Arrays.asList(mockBigQueryWrite)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BigQueryWriteSettings settings = 
BigQueryWriteSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -83,12 +83,15 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createWriteStreamTest() { - WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - String externalId = "externalId-1153075697"; + public void createWriteStreamTest() throws Exception { WriteStream expectedResponse = - WriteStream.newBuilder().setName(name.toString()).setExternalId(externalId).build(); + WriteStream.newBuilder() + .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setCommitTime(Timestamp.newBuilder().build()) + .setTableSchema(TableSchema.newBuilder().build()) + .setExternalId("external_id-1153075697") + .build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); @@ -99,9 +102,9 @@ public void createWriteStreamTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateWriteStreamRequest actualRequest = (CreateWriteStreamRequest) actualRequests.get(0); + CreateWriteStreamRequest actualRequest = ((CreateWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(writeStream, actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -110,32 +113,77 @@ public void createWriteStreamTest() { } @Test - @SuppressWarnings("all") public void createWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = 
TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); WriteStream writeStream = WriteStream.newBuilder().build(); + client.createWriteStream(parent, writeStream); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createWriteStreamTest2() throws Exception { + WriteStream expectedResponse = + WriteStream.newBuilder() + .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setCommitTime(Timestamp.newBuilder().build()) + .setTableSchema(TableSchema.newBuilder().build()) + .setExternalId("external_id-1153075697") + .build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String parent = "parent-995424086"; + WriteStream writeStream = WriteStream.newBuilder().build(); + + WriteStream actualResponse = client.createWriteStream(parent, writeStream); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateWriteStreamRequest actualRequest = ((CreateWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(writeStream, actualRequest.getWriteStream()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + @Test + public void createWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String parent = "parent-995424086"; + WriteStream writeStream = WriteStream.newBuilder().build(); client.createWriteStream(parent, writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void appendRowsTest() throws Exception { - long offset = 1019779949L; - AppendRowsResponse expectedResponse = AppendRowsResponse.newBuilder().setOffset(offset).build(); + AppendRowsResponse expectedResponse = + AppendRowsResponse.newBuilder().setUpdatedSchema(TableSchema.newBuilder().build()).build(); mockBigQueryWrite.addResponse(expectedResponse); - WriteStreamName writeStream = - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); AppendRowsRequest request = - AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); + AppendRowsRequest.newBuilder() + .setWriteStream( + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setOffset(Int64Value.newBuilder().build()) + .setIgnoreUnknownFields(true) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -153,14 +201,16 @@ public void appendRowsTest() throws Exception { } @Test - @SuppressWarnings("all") public void appendRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); - WriteStreamName writeStream = - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); AppendRowsRequest request = - AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); + AppendRowsRequest.newBuilder() + .setWriteStream( + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setOffset(Int64Value.newBuilder().build()) + .setIgnoreUnknownFields(true) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -176,18 +226,21 @@ public void appendRowsExceptionTest() throws Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof 
InvalidArgumentException); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void getWriteStreamTest() { - WriteStreamName name2 = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - String externalId = "externalId-1153075697"; + public void getWriteStreamTest() throws Exception { WriteStream expectedResponse = - WriteStream.newBuilder().setName(name2.toString()).setExternalId(externalId).build(); + WriteStream.newBuilder() + .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setCommitTime(Timestamp.newBuilder().build()) + .setTableSchema(TableSchema.newBuilder().build()) + .setExternalId("external_id-1153075697") + .build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); @@ -197,9 +250,9 @@ public void getWriteStreamTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetWriteStreamRequest actualRequest = (GetWriteStreamRequest) actualRequests.get(0); + GetWriteStreamRequest actualRequest = ((GetWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -207,27 +260,65 @@ public void getWriteStreamTest() { } @Test - @SuppressWarnings("all") public void getWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException 
exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.getWriteStream(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getWriteStreamTest2() throws Exception { + WriteStream expectedResponse = + WriteStream.newBuilder() + .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setCommitTime(Timestamp.newBuilder().build()) + .setTableSchema(TableSchema.newBuilder().build()) + .setExternalId("external_id-1153075697") + .build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String name = "name3373707"; + + WriteStream actualResponse = client.getWriteStream(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetWriteStreamRequest actualRequest = ((GetWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + try { + String name = "name3373707"; client.getWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void finalizeWriteStreamTest() { - long rowCount = 1340416618L; + public void finalizeWriteStreamTest() throws Exception { FinalizeWriteStreamResponse expectedResponse = - FinalizeWriteStreamResponse.newBuilder().setRowCount(rowCount).build(); + FinalizeWriteStreamResponse.newBuilder().setRowCount(1340416618).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); @@ -237,9 +328,9 @@ public void finalizeWriteStreamTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FinalizeWriteStreamRequest actualRequest = (FinalizeWriteStreamRequest) actualRequests.get(0); + FinalizeWriteStreamRequest actualRequest = ((FinalizeWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -247,26 +338,61 @@ public void finalizeWriteStreamTest() { } @Test - @SuppressWarnings("all") public void finalizeWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.finalizeWriteStream(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void finalizeWriteStreamTest2() throws Exception { + FinalizeWriteStreamResponse expectedResponse = + FinalizeWriteStreamResponse.newBuilder().setRowCount(1340416618).build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String name = "name3373707"; + + FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + FinalizeWriteStreamRequest actualRequest = ((FinalizeWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + @Test + public void finalizeWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String name = "name3373707"; client.finalizeWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void batchCommitWriteStreamsTest() { + public void batchCommitWriteStreamsTest() throws Exception { BatchCommitWriteStreamsResponse expectedResponse = - BatchCommitWriteStreamsResponse.newBuilder().build(); + BatchCommitWriteStreamsResponse.newBuilder() + .setCommitTime(Timestamp.newBuilder().build()) + .build(); mockBigQueryWrite.addResponse(expectedResponse); String parent = "parent-995424086"; @@ -277,7 +403,7 @@ public void batchCommitWriteStreamsTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); BatchCommitWriteStreamsRequest actualRequest = - (BatchCommitWriteStreamsRequest) actualRequests.get(0); + ((BatchCommitWriteStreamsRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( @@ -287,26 +413,23 @@ public void batchCommitWriteStreamsTest() { } @Test - @SuppressWarnings("all") public void batchCommitWriteStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { String parent = "parent-995424086"; - client.batchCommitWriteStreams(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void flushRowsTest() { - long offset = 1019779949L; - FlushRowsResponse expectedResponse = FlushRowsResponse.newBuilder().setOffset(offset).build(); + public void flushRowsTest() throws Exception { + FlushRowsResponse expectedResponse = + FlushRowsResponse.newBuilder().setOffset(-1019779949).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName writeStream = @@ -317,9 +440,9 @@ public void flushRowsTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FlushRowsRequest actualRequest = (FlushRowsRequest) actualRequests.get(0); + FlushRowsRequest actualRequest = ((FlushRowsRequest) actualRequests.get(0)); - Assert.assertEquals(writeStream, WriteStreamName.parse(actualRequest.getWriteStream())); + Assert.assertEquals(writeStream.toString(), actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -327,19 +450,53 @@ public void flushRowsTest() { } @Test - @SuppressWarnings("all") public void flushRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.flushRows(writeStream); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void flushRowsTest2() throws Exception { + FlushRowsResponse expectedResponse = + FlushRowsResponse.newBuilder().setOffset(-1019779949).build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String writeStream = "write_stream-1431753760"; + FlushRowsResponse actualResponse = client.flushRows(writeStream); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + FlushRowsRequest actualRequest = ((FlushRowsRequest) actualRequests.get(0)); + + Assert.assertEquals(writeStream, actualRequest.getWriteStream()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void flushRowsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String writeStream = "write_stream-1431753760"; client.flushRows(writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java index 26d09c0ef8..cd82e240fb 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryRead implements MockGrpcService { private final MockBigQueryReadImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java index 56d6b3d432..1482baca9c 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -23,9 +24,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryReadImpl extends BigQueryReadImplBase { private List requests; private Queue responses; @@ -62,10 +64,10 @@ public void createReadSession( Object response = responses.remove(); if (response instanceof ReadSession) { requests.add(request); - responseObserver.onNext((ReadSession) response); + responseObserver.onNext(((ReadSession) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -76,10 +78,10 @@ public void readRows(ReadRowsRequest request, StreamObserver r Object response = responses.remove(); if (response instanceof ReadRowsResponse) { requests.add(request); - responseObserver.onNext((ReadRowsResponse) response); + responseObserver.onNext(((ReadRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -91,10 +93,10 @@ public void splitReadStream( Object response = responses.remove(); if (response instanceof SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext((SplitReadStreamResponse) response); + responseObserver.onNext(((SplitReadStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); 
} else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java index ea99368e82..8adf63c1f2 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryWrite implements MockGrpcService { private final MockBigQueryWriteImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java index 654a52574d..078421f361 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -23,9 +24,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryWriteImpl extends BigQueryWriteImplBase { private List requests; private Queue responses; @@ -62,10 +64,10 @@ public void createWriteStream( Object response = responses.remove(); if (response instanceof WriteStream) { requests.add(request); - responseObserver.onNext((WriteStream) response); + responseObserver.onNext(((WriteStream) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -81,9 +83,9 @@ public void onNext(AppendRowsRequest value) { requests.add(value); final Object response = responses.remove(); if (response instanceof AppendRowsResponse) { - responseObserver.onNext((AppendRowsResponse) response); + responseObserver.onNext(((AppendRowsResponse) response)); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -108,10 +110,10 @@ public void getWriteStream( Object response = responses.remove(); if (response instanceof WriteStream) { requests.add(request); - responseObserver.onNext((WriteStream) response); + responseObserver.onNext(((WriteStream) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new 
IllegalArgumentException("Unrecognized response type")); } @@ -124,10 +126,10 @@ public void finalizeWriteStream( Object response = responses.remove(); if (response instanceof FinalizeWriteStreamResponse) { requests.add(request); - responseObserver.onNext((FinalizeWriteStreamResponse) response); + responseObserver.onNext(((FinalizeWriteStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -140,10 +142,10 @@ public void batchCommitWriteStreams( Object response = responses.remove(); if (response instanceof BatchCommitWriteStreamsResponse) { requests.add(request); - responseObserver.onNext((BatchCommitWriteStreamsResponse) response); + responseObserver.onNext(((BatchCommitWriteStreamsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -155,10 +157,10 @@ public void flushRows( Object response = responses.remove(); if (response instanceof FlushRowsResponse) { requests.add(request); - responseObserver.onNext((FlushRowsResponse) response); + responseObserver.onNext(((FlushRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java 
b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java index 0d8b2c2e12..f87488cdd7 100644 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,18 +23,26 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java") public class ProjectName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; + @Deprecated + protected ProjectName() { + project = null; + } + + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public String getProject() { return project; } @@ -47,10 +55,6 @@ public Builder toBuilder() { return new Builder(this); } - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } - public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -64,7 +68,7 @@ public static ProjectName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -78,9 +82,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ProjectName value : values) { - if (value == null) { + if (Objects.isNull(value)) { list.add(""); } else { list.add(value.toString()); @@ -90,15 +94,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); 
+ if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -112,14 +119,35 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project); + return PROJECT.instantiate("project", project); } - /** Builder for ProjectName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ProjectName that = ((ProjectName) o); + return Objects.equals(this.project, that.project); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + return h; + } + + /** Builder for projects/{project}. */ + public static class Builder { private String project; + protected Builder() {} + public String getProject() { return project; } @@ -129,8 +157,6 @@ public Builder setProject(String project) { return this; } - private Builder() {} - private Builder(ProjectName projectName) { project = projectName.project; } @@ -139,24 +165,4 @@ public ProjectName build() { return new ProjectName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ProjectName) { - ProjectName that = (ProjectName) o; - return (this.project.equals(that.project)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java deleted file mode 100644 index 4aa5209ddd..0000000000 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java +++ /dev/null @@ -1,210 +0,0 @@ -/* 
- * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.cloud.bigquery.storage.v1; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class ReadSessionName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding( - "projects/{project}/locations/{location}/sessions/{session}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String location; - private final String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private ReadSessionName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - } - - public static ReadSessionName of(String project, String location, 
String session) { - return newBuilder().setProject(project).setLocation(location).setSession(session).build(); - } - - public static String format(String project, String location, String session) { - return newBuilder() - .setProject(project) - .setLocation(location) - .setSession(session) - .build() - .toString(); - } - - public static ReadSessionName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "ReadSessionName.parse: formattedString not in valid format"); - return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); - } - - public static List parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (ReadSessionName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); - } - - /** Builder for ReadSessionName. 
*/ - public static class Builder { - - private String project; - private String location; - private String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setLocation(String location) { - this.location = location; - return this; - } - - public Builder setSession(String session) { - this.session = session; - return this; - } - - private Builder() {} - - private Builder(ReadSessionName readSessionName) { - project = readSessionName.project; - location = readSessionName.location; - session = readSessionName.session; - } - - public ReadSessionName build() { - return new ReadSessionName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadSessionName) { - ReadSessionName that = (ReadSessionName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java index 9c8236c663..40301ee1f2 100644 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,36 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class ReadStreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_LOCATION_SESSION_STREAM = PathTemplate.createWithoutUrlEncoding( "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}"); - private volatile Map fieldValuesMap; - private final String project; private final String location; private final String session; private final String stream; + @Deprecated + protected ReadStreamName() { + project = null; + location = null; + session = null; + stream = null; + } + + private ReadStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public String getProject() { return project; } @@ -63,13 +77,6 @@ public Builder toBuilder() { return new Builder(this); } - private ReadStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public static ReadStreamName of(String project, String location, String 
session, String stream) { return newBuilder() .setProject(project) @@ -94,7 +101,7 @@ public static ReadStreamName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_LOCATION_SESSION_STREAM.validatedMatch( formattedString, "ReadStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -112,9 +119,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ReadStreamName value : values) { - if (value == null) { + if (Objects.isNull(value)) { list.add(""); } else { list.add(value.toString()); @@ -124,18 +131,27 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_LOCATION_SESSION_STREAM.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldMapBuilder.put("stream", stream); + if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } + if (!Objects.isNull(location)) { + fieldMapBuilder.put("location", location); + } + if (!Objects.isNull(session)) { + fieldMapBuilder.put("session", session); + } + if (!Objects.isNull(stream)) { + fieldMapBuilder.put("stream", stream); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -149,18 +165,48 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate( + return PROJECT_LOCATION_SESSION_STREAM.instantiate( 
"project", project, "location", location, "session", session, "stream", stream); } - /** Builder for ReadStreamName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ReadStreamName that = ((ReadStreamName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.session, that.session) + && Objects.equals(this.stream, that.stream); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(session); + h *= 1000003; + h ^= Objects.hashCode(stream); + return h; + } + + /** Builder for projects/{project}/locations/{location}/sessions/{session}/streams/{stream}. */ + public static class Builder { private String project; private String location; private String session; private String stream; + protected Builder() {} + public String getProject() { return project; } @@ -197,8 +243,6 @@ public Builder setStream(String stream) { return this; } - private Builder() {} - private Builder(ReadStreamName readStreamName) { project = readStreamName.project; location = readStreamName.location; @@ -210,33 +254,4 @@ public ReadStreamName build() { return new ReadStreamName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadStreamName) { - ReadStreamName that = (ReadStreamName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - h *= 1000003; - h 
^= stream.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java deleted file mode 100644 index 4478859799..0000000000 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.cloud.bigquery.storage.v1; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class TableName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String dataset; - private final String table; - - public String getProject() { - return project; - } - - public String getDataset() { - return dataset; - } - - public String getTable() { - return table; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private TableName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - } - - public static TableName of(String project, String dataset, String table) { - return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); - } - - public static String format(String project, String dataset, String table) { - return newBuilder().setProject(project).setDataset(dataset).setTable(table).build().toString(); - } - - public static TableName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "TableName.parse: formattedString not in valid format"); - return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); - } - - public static List 
parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (TableName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); - } - - /** Builder for TableName. 
*/ - public static class Builder { - - private String project; - private String dataset; - private String table; - - public String getProject() { - return project; - } - - public String getDataset() { - return dataset; - } - - public String getTable() { - return table; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setDataset(String dataset) { - this.dataset = dataset; - return this; - } - - public Builder setTable(String table) { - this.table = table; - return this; - } - - private Builder() {} - - private Builder(TableName tableName) { - project = tableName.project; - dataset = tableName.dataset; - table = tableName.table; - } - - public TableName build() { - return new TableName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof TableName) { - TableName that = (TableName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java index 47ab519a3a..2041be7e62 100644 --- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java +++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,20 +23,32 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class TableName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_DATASET_TABLE = PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); - private volatile Map fieldValuesMap; - private final String project; private final String dataset; private final String table; + @Deprecated + protected TableName() { + project = null; + dataset = null; + table = null; + } + + private TableName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + } + public String getProject() { return project; } @@ -57,12 +69,6 @@ public Builder toBuilder() { return new Builder(this); } - private TableName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - } - public static TableName of(String project, String dataset, String table) { return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); } @@ -76,7 +82,7 @@ public static TableName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + 
PROJECT_DATASET_TABLE.validatedMatch( formattedString, "TableName.parse: formattedString not in valid format"); return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); } @@ -90,9 +96,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (TableName value : values) { - if (value == null) { + if (Objects.isNull(value)) { list.add(""); } else { list.add(value.toString()); @@ -102,17 +108,24 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_DATASET_TABLE.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); + if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } + if (!Objects.isNull(dataset)) { + fieldMapBuilder.put("dataset", dataset); + } + if (!Objects.isNull(table)) { + fieldMapBuilder.put("table", table); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -126,16 +139,44 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); + return PROJECT_DATASET_TABLE.instantiate( + "project", project, "dataset", dataset, "table", table); } - /** Builder for TableName. 
*/ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + TableName that = ((TableName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= Objects.hashCode(table); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}. */ + public static class Builder { private String project; private String dataset; private String table; + protected Builder() {} + public String getProject() { return project; } @@ -163,8 +204,6 @@ public Builder setTable(String table) { return this; } - private Builder() {} - private Builder(TableName tableName) { project = tableName.project; dataset = tableName.dataset; @@ -175,30 +214,4 @@ public TableName build() { return new TableName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof TableName) { - TableName that = (TableName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java index dbc4bd48a7..2ef24c19fa 100644 --- 
a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,36 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class WriteStreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_DATASET_TABLE_STREAM = PathTemplate.createWithoutUrlEncoding( "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"); - private volatile Map fieldValuesMap; - private final String project; private final String dataset; private final String table; private final String stream; + @Deprecated + protected WriteStreamName() { + project = null; + dataset = null; + table = null; + stream = null; + } + + private WriteStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public String getProject() { return project; } @@ -63,13 +77,6 @@ public Builder toBuilder() { return new Builder(this); } - private WriteStreamName(Builder builder) { - project = 
Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public static WriteStreamName of(String project, String dataset, String table, String stream) { return newBuilder() .setProject(project) @@ -94,7 +101,7 @@ public static WriteStreamName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_DATASET_TABLE_STREAM.validatedMatch( formattedString, "WriteStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -112,9 +119,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (WriteStreamName value : values) { - if (value == null) { + if (Objects.isNull(value)) { list.add(""); } else { list.add(value.toString()); @@ -124,18 +131,27 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_DATASET_TABLE_STREAM.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); - fieldMapBuilder.put("stream", stream); + if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } + if (!Objects.isNull(dataset)) { + fieldMapBuilder.put("dataset", dataset); + } + if (!Objects.isNull(table)) { + fieldMapBuilder.put("table", table); + } + if (!Objects.isNull(stream)) { + 
fieldMapBuilder.put("stream", stream); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -149,18 +165,48 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate( + return PROJECT_DATASET_TABLE_STREAM.instantiate( "project", project, "dataset", dataset, "table", table, "stream", stream); } - /** Builder for WriteStreamName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + WriteStreamName that = ((WriteStreamName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table) + && Objects.equals(this.stream, that.stream); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= Objects.hashCode(table); + h *= 1000003; + h ^= Objects.hashCode(stream); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}. 
*/ + public static class Builder { private String project; private String dataset; private String table; private String stream; + protected Builder() {} + public String getProject() { return project; } @@ -197,8 +243,6 @@ public Builder setStream(String stream) { return this; } - private Builder() {} - private Builder(WriteStreamName writeStreamName) { project = writeStreamName.project; dataset = writeStreamName.dataset; @@ -210,33 +254,4 @@ public WriteStreamName build() { return new WriteStreamName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof WriteStreamName) { - WriteStreamName that = (WriteStreamName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - h *= 1000003; - h ^= stream.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java index 3ffe5f3360..4f0ef431f0 100644 --- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,18 +23,26 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class ProjectName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; + @Deprecated + protected ProjectName() { + project = null; + } + + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public String getProject() { return project; } @@ -47,10 +55,6 @@ public Builder toBuilder() { return new Builder(this); } - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } - public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -64,7 +68,7 @@ public static ProjectName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -78,9 +82,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ProjectName value : values) { - if (value == null) { + if (Objects.isNull(value)) { 
list.add(""); } else { list.add(value.toString()); @@ -90,15 +94,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); + if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -112,14 +119,35 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project); + return PROJECT.instantiate("project", project); } - /** Builder for ProjectName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ProjectName that = ((ProjectName) o); + return Objects.equals(this.project, that.project); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + return h; + } + + /** Builder for projects/{project}. 
*/ + public static class Builder { private String project; + protected Builder() {} + public String getProject() { return project; } @@ -129,8 +157,6 @@ public Builder setProject(String project) { return this; } - private Builder() {} - private Builder(ProjectName projectName) { project = projectName.project; } @@ -139,24 +165,4 @@ public ProjectName build() { return new ProjectName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ProjectName) { - ProjectName that = (ProjectName) o; - return (this.project.equals(that.project)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java deleted file mode 100644 index c0762f78e5..0000000000 --- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.cloud.bigquery.storage.v1beta1; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class ReadSessionName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding( - "projects/{project}/locations/{location}/sessions/{session}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String location; - private final String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private ReadSessionName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - } - - public static ReadSessionName of(String project, String location, String session) { - return newBuilder().setProject(project).setLocation(location).setSession(session).build(); - } - - public static String format(String project, String location, String session) { - return newBuilder() - .setProject(project) - .setLocation(location) - .setSession(session) - .build() - .toString(); - } - - public static ReadSessionName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "ReadSessionName.parse: formattedString not in valid format"); - return 
of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); - } - - public static List parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (ReadSessionName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); - } - - /** Builder for ReadSessionName. 
*/ - public static class Builder { - - private String project; - private String location; - private String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setLocation(String location) { - this.location = location; - return this; - } - - public Builder setSession(String session) { - this.session = session; - return this; - } - - private Builder() {} - - private Builder(ReadSessionName readSessionName) { - project = readSessionName.project; - location = readSessionName.location; - session = readSessionName.session; - } - - public ReadSessionName build() { - return new ReadSessionName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadSessionName) { - ReadSessionName that = (ReadSessionName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java deleted file mode 100644 index a486d4fc85..0000000000 --- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.cloud.bigquery.storage.v1beta1; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class StreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding( - "projects/{project}/locations/{location}/streams/{stream}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String location; - private final String stream; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getStream() { - return stream; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private StreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - - public static StreamName of(String project, String location, String stream) { - return newBuilder().setProject(project).setLocation(location).setStream(stream).build(); - } - - public static String format(String project, String location, String stream) { 
- return newBuilder() - .setProject(project) - .setLocation(location) - .setStream(stream) - .build() - .toString(); - } - - public static StreamName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "StreamName.parse: formattedString not in valid format"); - return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("stream")); - } - - public static List parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (StreamName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("stream", stream); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "location", location, "stream", stream); - } - - /** Builder for StreamName. 
*/ - public static class Builder { - - private String project; - private String location; - private String stream; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getStream() { - return stream; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setLocation(String location) { - this.location = location; - return this; - } - - public Builder setStream(String stream) { - this.stream = stream; - return this; - } - - private Builder() {} - - private Builder(StreamName streamName) { - project = streamName.project; - location = streamName.location; - stream = streamName.stream; - } - - public StreamName build() { - return new StreamName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof StreamName) { - StreamName that = (StreamName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= stream.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java index 3ca1b0df14..60fd9bf737 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,18 +23,26 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class ProjectName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; + @Deprecated + protected ProjectName() { + project = null; + } + + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public String getProject() { return project; } @@ -47,10 +55,6 @@ public Builder toBuilder() { return new Builder(this); } - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } - public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -64,7 +68,7 @@ public static ProjectName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -78,9 +82,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ProjectName value : values) { - if (value == null) { + if (Objects.isNull(value)) { 
list.add(""); } else { list.add(value.toString()); @@ -90,15 +94,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); + if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -112,14 +119,35 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project); + return PROJECT.instantiate("project", project); } - /** Builder for ProjectName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ProjectName that = ((ProjectName) o); + return Objects.equals(this.project, that.project); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + return h; + } + + /** Builder for projects/{project}. 
*/ + public static class Builder { private String project; + protected Builder() {} + public String getProject() { return project; } @@ -129,8 +157,6 @@ public Builder setProject(String project) { return this; } - private Builder() {} - private Builder(ProjectName projectName) { project = projectName.project; } @@ -139,24 +165,4 @@ public ProjectName build() { return new ProjectName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ProjectName) { - ProjectName that = (ProjectName) o; - return (this.project.equals(that.project)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java deleted file mode 100644 index 46c8731d94..0000000000 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.cloud.bigquery.storage.v1beta2; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class ReadSessionName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding( - "projects/{project}/locations/{location}/sessions/{session}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String location; - private final String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private ReadSessionName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - } - - public static ReadSessionName of(String project, String location, String session) { - return newBuilder().setProject(project).setLocation(location).setSession(session).build(); - } - - public static String format(String project, String location, String session) { - return newBuilder() - .setProject(project) - .setLocation(location) - .setSession(session) - .build() - .toString(); - } - - public static ReadSessionName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "ReadSessionName.parse: formattedString not in valid format"); - return 
of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); - } - - public static List parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (ReadSessionName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); - } - - /** Builder for ReadSessionName. 
*/ - public static class Builder { - - private String project; - private String location; - private String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setLocation(String location) { - this.location = location; - return this; - } - - public Builder setSession(String session) { - this.session = session; - return this; - } - - private Builder() {} - - private Builder(ReadSessionName readSessionName) { - project = readSessionName.project; - location = readSessionName.location; - session = readSessionName.session; - } - - public ReadSessionName build() { - return new ReadSessionName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadSessionName) { - ReadSessionName that = (ReadSessionName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java index cd616ebba7..0533d6d397 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,36 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class ReadStreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_LOCATION_SESSION_STREAM = PathTemplate.createWithoutUrlEncoding( "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}"); - private volatile Map fieldValuesMap; - private final String project; private final String location; private final String session; private final String stream; + @Deprecated + protected ReadStreamName() { + project = null; + location = null; + session = null; + stream = null; + } + + private ReadStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public String getProject() { return project; } @@ -63,13 +77,6 @@ public Builder toBuilder() { return new Builder(this); } - private ReadStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public static ReadStreamName of(String project, String location, String 
session, String stream) { return newBuilder() .setProject(project) @@ -94,7 +101,7 @@ public static ReadStreamName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_LOCATION_SESSION_STREAM.validatedMatch( formattedString, "ReadStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -112,9 +119,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ReadStreamName value : values) { - if (value == null) { + if (Objects.isNull(value)) { list.add(""); } else { list.add(value.toString()); @@ -124,18 +131,27 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_LOCATION_SESSION_STREAM.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldMapBuilder.put("stream", stream); + if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } + if (!Objects.isNull(location)) { + fieldMapBuilder.put("location", location); + } + if (!Objects.isNull(session)) { + fieldMapBuilder.put("session", session); + } + if (!Objects.isNull(stream)) { + fieldMapBuilder.put("stream", stream); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -149,18 +165,48 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate( + return PROJECT_LOCATION_SESSION_STREAM.instantiate( 
"project", project, "location", location, "session", session, "stream", stream); } - /** Builder for ReadStreamName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ReadStreamName that = ((ReadStreamName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.session, that.session) + && Objects.equals(this.stream, that.stream); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(session); + h *= 1000003; + h ^= Objects.hashCode(stream); + return h; + } + + /** Builder for projects/{project}/locations/{location}/sessions/{session}/streams/{stream}. */ + public static class Builder { private String project; private String location; private String session; private String stream; + protected Builder() {} + public String getProject() { return project; } @@ -197,8 +243,6 @@ public Builder setStream(String stream) { return this; } - private Builder() {} - private Builder(ReadStreamName readStreamName) { project = readStreamName.project; location = readStreamName.location; @@ -210,33 +254,4 @@ public ReadStreamName build() { return new ReadStreamName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadStreamName) { - ReadStreamName that = (ReadStreamName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - h *= 1000003; - h 
^= stream.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java index 476d710941..2d4287f1d1 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,20 +23,32 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java") public class TableName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_DATASET_TABLE = PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); - private volatile Map fieldValuesMap; - private final String project; private final String dataset; private final String table; + @Deprecated + protected TableName() { + project = null; + dataset = null; + table = null; + } + + private TableName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + } + public String getProject() { return project; } @@ -57,12 +69,6 @@ public Builder toBuilder() { return new Builder(this); } - private TableName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - } - public static TableName of(String project, String dataset, String table) { return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); } @@ -76,7 +82,7 @@ public static TableName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_DATASET_TABLE.validatedMatch( formattedString, "TableName.parse: formattedString not in valid format"); return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); } @@ -90,9 +96,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (TableName value : values) { - if (value == null) { + if (Objects.isNull(value)) { list.add(""); } else { list.add(value.toString()); @@ -102,17 +108,24 @@ public static List 
toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_DATASET_TABLE.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); + if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } + if (!Objects.isNull(dataset)) { + fieldMapBuilder.put("dataset", dataset); + } + if (!Objects.isNull(table)) { + fieldMapBuilder.put("table", table); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -126,16 +139,44 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); + return PROJECT_DATASET_TABLE.instantiate( + "project", project, "dataset", dataset, "table", table); } - /** Builder for TableName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + TableName that = ((TableName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= Objects.hashCode(table); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}. 
*/ + public static class Builder { private String project; private String dataset; private String table; + protected Builder() {} + public String getProject() { return project; } @@ -163,8 +204,6 @@ public Builder setTable(String table) { return this; } - private Builder() {} - private Builder(TableName tableName) { project = tableName.project; dataset = tableName.dataset; @@ -175,30 +214,4 @@ public TableName build() { return new TableName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof TableName) { - TableName that = (TableName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java index ba877cc86b..d5325dadca 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,36 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class WriteStreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_DATASET_TABLE_STREAM = PathTemplate.createWithoutUrlEncoding( "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"); - private volatile Map fieldValuesMap; - private final String project; private final String dataset; private final String table; private final String stream; + @Deprecated + protected WriteStreamName() { + project = null; + dataset = null; + table = null; + stream = null; + } + + private WriteStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public String getProject() { return project; } @@ -63,13 +77,6 @@ public Builder toBuilder() { return new Builder(this); } - private WriteStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public static WriteStreamName of(String project, String dataset, String table, String stream) { 
return newBuilder() .setProject(project) @@ -94,7 +101,7 @@ public static WriteStreamName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_DATASET_TABLE_STREAM.validatedMatch( formattedString, "WriteStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -112,9 +119,9 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (WriteStreamName value : values) { - if (value == null) { + if (Objects.isNull(value)) { list.add(""); } else { list.add(value.toString()); @@ -124,18 +131,27 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_DATASET_TABLE_STREAM.matches(formattedString); } + @Override public Map getFieldValuesMap() { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { synchronized (this) { - if (fieldValuesMap == null) { + if (Objects.isNull(fieldValuesMap)) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); - fieldMapBuilder.put("stream", stream); + if (!Objects.isNull(project)) { + fieldMapBuilder.put("project", project); + } + if (!Objects.isNull(dataset)) { + fieldMapBuilder.put("dataset", dataset); + } + if (!Objects.isNull(table)) { + fieldMapBuilder.put("table", table); + } + if (!Objects.isNull(stream)) { + fieldMapBuilder.put("stream", stream); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -149,18 +165,48 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate( + return PROJECT_DATASET_TABLE_STREAM.instantiate( "project", project, "dataset", dataset, "table", 
table, "stream", stream); } - /** Builder for WriteStreamName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + WriteStreamName that = ((WriteStreamName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table) + && Objects.equals(this.stream, that.stream); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= Objects.hashCode(table); + h *= 1000003; + h ^= Objects.hashCode(stream); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}. */ + public static class Builder { private String project; private String dataset; private String table; private String stream; + protected Builder() {} + public String getProject() { return project; } @@ -197,8 +243,6 @@ public Builder setStream(String stream) { return this; } - private Builder() {} - private Builder(WriteStreamName writeStreamName) { project = writeStreamName.project; dataset = writeStreamName.dataset; @@ -210,33 +254,4 @@ public WriteStreamName build() { return new WriteStreamName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof WriteStreamName) { - WriteStreamName that = (WriteStreamName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - h *= 1000003; - h ^= stream.hashCode(); - return h; - } } diff --git a/synth.metadata 
b/synth.metadata index c94c65a551..f131b56899 100644 --- a/synth.metadata +++ b/synth.metadata @@ -11,39 +11,39 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/java-bigquerystorage.git", - "sha": "20aef28ee5fc0f7b633e50734266bf614c19502d" + "sha": "ab6213c244162c74242d3aaf543bfcf9b2eb4405" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "8cfc6c874574b649b4524a4ef6a0651533325a84", - "internalRef": "345067549" + "sha": "e39e42f368d236203a774ee994fcb4d730c33a83", + "internalRef": "345311069" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "8cfc6c874574b649b4524a4ef6a0651533325a84", - "internalRef": "345067549" + "sha": "e39e42f368d236203a774ee994fcb4d730c33a83", + "internalRef": "345311069" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "8cfc6c874574b649b4524a4ef6a0651533325a84", - "internalRef": "345067549" + "sha": "e39e42f368d236203a774ee994fcb4d730c33a83", + "internalRef": "345311069" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "8cfc6c874574b649b4524a4ef6a0651533325a84", - "internalRef": "345067549" + "sha": "e39e42f368d236203a774ee994fcb4d730c33a83", + "internalRef": "345311069" } }, { @@ -234,7 +234,6 @@ "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadRowsResponse.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadRowsResponseOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSession.java", - "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionOrBuilder.java", 
"proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStream.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java", @@ -247,7 +246,6 @@ "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamProto.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamStats.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamStatsOrBuilder.java", - "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ThrottleState.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ThrottleStateOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/proto/google/cloud/bigquery/storage/v1/arrow.proto", @@ -268,9 +266,7 @@ "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/AvroProto.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadOptions.java", - "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/Storage.java", - "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/TableReferenceProto.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/arrow.proto", 
"proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/avro.proto", @@ -323,7 +319,6 @@ "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadRowsResponse.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadRowsResponseOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSession.java", - "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStream.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java", From c0bc27b28c044ce79f1aa8ef955275f3c12e6a4c Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 11 Dec 2020 16:24:01 -0800 Subject: [PATCH 2/6] feat!: Updates to BigQuery Write API V1Beta2 public interface. This includes breaking changes to the API, it is fine because the API is not officially launched yet. 
PiperOrigin-RevId: 345469340 Source-Author: Google APIs Source-Date: Thu Dec 3 09:33:11 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: b53c4d98aab1eae3dac90b37019dede686782f13 Source-Link: https://github.com/googleapis/googleapis/commit/b53c4d98aab1eae3dac90b37019dede686782f13 --- .../v1beta2/BigQueryWriteClientTest.java | 10 +- .../storage/v1beta2/AppendRowsRequest.java | 176 ++- .../v1beta2/AppendRowsRequestOrBuilder.java | 24 +- .../storage/v1beta2/AppendRowsResponse.java | 1306 ++++++++++++++++- .../v1beta2/AppendRowsResponseOrBuilder.java | 79 +- .../BatchCommitWriteStreamsResponse.java | 548 ++++++- ...chCommitWriteStreamsResponseOrBuilder.java | 61 + .../storage/v1beta2/StorageError.java | 1214 +++++++++++++++ .../v1beta2/StorageErrorOrBuilder.java | 100 ++ .../storage/v1beta2/StorageProto.java | 223 +-- .../bigquery/storage/v1beta2/StreamProto.java | 30 +- .../bigquery/storage/v1beta2/WriteStream.java | 179 --- .../storage/v1beta2/WriteStreamOrBuilder.java | 25 - .../bigquery/storage/v1beta2/storage.proto | 78 +- .../bigquery/storage/v1beta2/stream.proto | 3 - synth.metadata | 18 +- 16 files changed, 3609 insertions(+), 465 deletions(-) create mode 100644 proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageError.java create mode 100644 proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageErrorOrBuilder.java diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java index 1a9c3ee14b..bc30aeea74 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java @@ -32,6 +32,7 @@ import 
com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; @@ -90,7 +91,6 @@ public void createWriteStreamTest() throws Exception { .setCreateTime(Timestamp.newBuilder().build()) .setCommitTime(Timestamp.newBuilder().build()) .setTableSchema(TableSchema.newBuilder().build()) - .setExternalId("external_id-1153075697") .build(); mockBigQueryWrite.addResponse(expectedResponse); @@ -135,7 +135,6 @@ public void createWriteStreamTest2() throws Exception { .setCreateTime(Timestamp.newBuilder().build()) .setCommitTime(Timestamp.newBuilder().build()) .setTableSchema(TableSchema.newBuilder().build()) - .setExternalId("external_id-1153075697") .build(); mockBigQueryWrite.addResponse(expectedResponse); @@ -182,7 +181,7 @@ public void appendRowsTest() throws Exception { .setWriteStream( WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) .setOffset(Int64Value.newBuilder().build()) - .setIgnoreUnknownFields(true) + .setTraceId("trace_id1270300245") .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -209,7 +208,7 @@ public void appendRowsExceptionTest() throws Exception { .setWriteStream( WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) .setOffset(Int64Value.newBuilder().build()) - .setIgnoreUnknownFields(true) + .setTraceId("trace_id1270300245") .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -239,7 +238,6 @@ public void getWriteStreamTest() throws Exception { .setCreateTime(Timestamp.newBuilder().build()) .setCommitTime(Timestamp.newBuilder().build()) .setTableSchema(TableSchema.newBuilder().build()) - .setExternalId("external_id-1153075697") .build(); mockBigQueryWrite.addResponse(expectedResponse); @@ -281,7 +279,6 @@ public void getWriteStreamTest2() throws Exception { .setCreateTime(Timestamp.newBuilder().build()) 
.setCommitTime(Timestamp.newBuilder().build()) .setTableSchema(TableSchema.newBuilder().build()) - .setExternalId("external_id-1153075697") .build(); mockBigQueryWrite.addResponse(expectedResponse); @@ -392,6 +389,7 @@ public void batchCommitWriteStreamsTest() throws Exception { BatchCommitWriteStreamsResponse expectedResponse = BatchCommitWriteStreamsResponse.newBuilder() .setCommitTime(Timestamp.newBuilder().build()) + .addAllStreamErrors(new ArrayList()) .build(); mockBigQueryWrite.addResponse(expectedResponse); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsRequest.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsRequest.java index 48f5338379..d42a64264c 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsRequest.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsRequest.java @@ -39,6 +39,7 @@ private AppendRowsRequest(com.google.protobuf.GeneratedMessageV3.Builder buil private AppendRowsRequest() { writeStream_ = ""; + traceId_ = ""; } @java.lang.Override @@ -114,9 +115,11 @@ private AppendRowsRequest( rowsCase_ = 4; break; } - case 40: + case 50: { - ignoreUnknownFields_ = input.readBool(); + java.lang.String s = input.readStringRequireUtf8(); + + traceId_ = s; break; } default: @@ -1473,24 +1476,55 @@ public com.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData get .getDefaultInstance(); } - public static final int IGNORE_UNKNOWN_FIELDS_FIELD_NUMBER = 5; - private boolean ignoreUnknownFields_; + public static final int TRACE_ID_FIELD_NUMBER = 6; + private volatile java.lang.Object traceId_; + /** + * + * + *
+   * Id set by client to annotate its identity. Only initial request setting is
+   * respected.
+   * 
+ * + * string trace_id = 6; + * + * @return The traceId. + */ + @java.lang.Override + public java.lang.String getTraceId() { + java.lang.Object ref = traceId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + traceId_ = s; + return s; + } + } /** * * *
-   * Only initial request setting is respected. If true, drop unknown input
-   * fields. Otherwise, the extra fields will cause append to fail. Default
-   * value is false.
+   * Id set by client to annotate its identity. Only initial request setting is
+   * respected.
    * 
* - * bool ignore_unknown_fields = 5; + * string trace_id = 6; * - * @return The ignoreUnknownFields. + * @return The bytes for traceId. */ @java.lang.Override - public boolean getIgnoreUnknownFields() { - return ignoreUnknownFields_; + public com.google.protobuf.ByteString getTraceIdBytes() { + java.lang.Object ref = traceId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + traceId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } private byte memoizedIsInitialized = -1; @@ -1523,8 +1557,8 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io output.writeMessage( 4, (com.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData) rows_); } - if (ignoreUnknownFields_ != false) { - output.writeBool(5, ignoreUnknownFields_); + if (!getTraceIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, traceId_); } unknownFields.writeTo(output); } @@ -1546,8 +1580,8 @@ public int getSerializedSize() { com.google.protobuf.CodedOutputStream.computeMessageSize( 4, (com.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.ProtoData) rows_); } - if (ignoreUnknownFields_ != false) { - size += com.google.protobuf.CodedOutputStream.computeBoolSize(5, ignoreUnknownFields_); + if (!getTraceIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, traceId_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -1570,7 +1604,7 @@ public boolean equals(final java.lang.Object obj) { if (hasOffset()) { if (!getOffset().equals(other.getOffset())) return false; } - if (getIgnoreUnknownFields() != other.getIgnoreUnknownFields()) return false; + if (!getTraceId().equals(other.getTraceId())) return false; if (!getRowsCase().equals(other.getRowsCase())) return false; switch (rowsCase_) { case 4: @@ -1596,8 +1630,8 @@ public int hashCode() { 
hash = (37 * hash) + OFFSET_FIELD_NUMBER; hash = (53 * hash) + getOffset().hashCode(); } - hash = (37 * hash) + IGNORE_UNKNOWN_FIELDS_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getIgnoreUnknownFields()); + hash = (37 * hash) + TRACE_ID_FIELD_NUMBER; + hash = (53 * hash) + getTraceId().hashCode(); switch (rowsCase_) { case 4: hash = (37 * hash) + PROTO_ROWS_FIELD_NUMBER; @@ -1760,7 +1794,7 @@ public Builder clear() { offset_ = null; offsetBuilder_ = null; } - ignoreUnknownFields_ = false; + traceId_ = ""; rowsCase_ = 0; rows_ = null; @@ -1804,7 +1838,7 @@ public com.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest buildPartial( result.rows_ = protoRowsBuilder_.build(); } } - result.ignoreUnknownFields_ = ignoreUnknownFields_; + result.traceId_ = traceId_; result.rowsCase_ = rowsCase_; onBuilt(); return result; @@ -1863,8 +1897,9 @@ public Builder mergeFrom(com.google.cloud.bigquery.storage.v1beta2.AppendRowsReq if (other.hasOffset()) { mergeOffset(other.getOffset()); } - if (other.getIgnoreUnknownFields() != false) { - setIgnoreUnknownFields(other.getIgnoreUnknownFields()); + if (!other.getTraceId().isEmpty()) { + traceId_ = other.traceId_; + onChanged(); } switch (other.getRowsCase()) { case PROTO_ROWS: @@ -2498,41 +2533,72 @@ public Builder clearProtoRows() { return protoRowsBuilder_; } - private boolean ignoreUnknownFields_; + private java.lang.Object traceId_ = ""; /** * * *
-     * Only initial request setting is respected. If true, drop unknown input
-     * fields. Otherwise, the extra fields will cause append to fail. Default
-     * value is false.
+     * Id set by client to annotate its identity. Only initial request setting is
+     * respected.
      * 
* - * bool ignore_unknown_fields = 5; + * string trace_id = 6; * - * @return The ignoreUnknownFields. + * @return The traceId. */ - @java.lang.Override - public boolean getIgnoreUnknownFields() { - return ignoreUnknownFields_; + public java.lang.String getTraceId() { + java.lang.Object ref = traceId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + traceId_ = s; + return s; + } else { + return (java.lang.String) ref; + } } /** * * *
-     * Only initial request setting is respected. If true, drop unknown input
-     * fields. Otherwise, the extra fields will cause append to fail. Default
-     * value is false.
+     * Id set by client to annotate its identity. Only initial request setting is
+     * respected.
      * 
* - * bool ignore_unknown_fields = 5; + * string trace_id = 6; * - * @param value The ignoreUnknownFields to set. + * @return The bytes for traceId. + */ + public com.google.protobuf.ByteString getTraceIdBytes() { + java.lang.Object ref = traceId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + traceId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Id set by client to annotate its identity. Only initial request setting is
+     * respected.
+     * 
+ * + * string trace_id = 6; + * + * @param value The traceId to set. * @return This builder for chaining. */ - public Builder setIgnoreUnknownFields(boolean value) { + public Builder setTraceId(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } - ignoreUnknownFields_ = value; + traceId_ = value; onChanged(); return this; } @@ -2540,18 +2606,40 @@ public Builder setIgnoreUnknownFields(boolean value) { * * *
-     * Only initial request setting is respected. If true, drop unknown input
-     * fields. Otherwise, the extra fields will cause append to fail. Default
-     * value is false.
+     * Id set by client to annotate its identity. Only initial request setting is
+     * respected.
      * 
* - * bool ignore_unknown_fields = 5; + * string trace_id = 6; * * @return This builder for chaining. */ - public Builder clearIgnoreUnknownFields() { + public Builder clearTraceId() { + + traceId_ = getDefaultInstance().getTraceId(); + onChanged(); + return this; + } + /** + * + * + *
+     * Id set by client to annotate its identity. Only initial request setting is
+     * respected.
+     * 
+ * + * string trace_id = 6; + * + * @param value The bytes for traceId to set. + * @return This builder for chaining. + */ + public Builder setTraceIdBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); - ignoreUnknownFields_ = false; + traceId_ = value; onChanged(); return this; } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsRequestOrBuilder.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsRequestOrBuilder.java index efff1f56d1..fb9bb565f1 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsRequestOrBuilder.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsRequestOrBuilder.java @@ -144,16 +144,28 @@ public interface AppendRowsRequestOrBuilder * * *
-   * Only initial request setting is respected. If true, drop unknown input
-   * fields. Otherwise, the extra fields will cause append to fail. Default
-   * value is false.
+   * Id set by client to annotate its identity. Only initial request setting is
+   * respected.
    * 
* - * bool ignore_unknown_fields = 5; + * string trace_id = 6; * - * @return The ignoreUnknownFields. + * @return The traceId. */ - boolean getIgnoreUnknownFields(); + java.lang.String getTraceId(); + /** + * + * + *
+   * Id set by client to annotate its identity. Only initial request setting is
+   * respected.
+   * 
+ * + * string trace_id = 6; + * + * @return The bytes for traceId. + */ + com.google.protobuf.ByteString getTraceIdBytes(); public com.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest.RowsCase getRowsCase(); } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsResponse.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsResponse.java index 76e4e595e6..9edad09966 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsResponse.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsResponse.java @@ -68,10 +68,28 @@ private AppendRowsResponse( case 0: done = true; break; - case 8: + case 10: { + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.Builder + subBuilder = null; + if (responseCase_ == 1) { + subBuilder = + ((com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + response_) + .toBuilder(); + } + response_ = + input.readMessage( + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom( + (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + response_); + response_ = subBuilder.buildPartial(); + } responseCase_ = 1; - response_ = input.readInt64(); break; } case 18: @@ -139,6 +157,796 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.Builder.class); } + public interface AppendResultOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+     * The row offset at which the last append occurred. The offset will not be
+     * set if appending using default streams.
+     * 
+ * + * .google.protobuf.Int64Value offset = 1; + * + * @return Whether the offset field is set. + */ + boolean hasOffset(); + /** + * + * + *
+     * The row offset at which the last append occurred. The offset will not be
+     * set if appending using default streams.
+     * 
+ * + * .google.protobuf.Int64Value offset = 1; + * + * @return The offset. + */ + com.google.protobuf.Int64Value getOffset(); + /** + * + * + *
+     * The row offset at which the last append occurred. The offset will not be
+     * set if appending using default streams.
+     * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + com.google.protobuf.Int64ValueOrBuilder getOffsetOrBuilder(); + } + /** + * + * + *
+   * A success append result.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult} + */ + public static final class AppendResult extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + AppendResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use AppendResult.newBuilder() to construct. + private AppendResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private AppendResult() {} + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new AppendResult(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private AppendResult( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: + { + com.google.protobuf.Int64Value.Builder subBuilder = null; + if (offset_ != null) { + subBuilder = offset_.toBuilder(); + } + offset_ = + input.readMessage(com.google.protobuf.Int64Value.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(offset_); + offset_ = subBuilder.buildPartial(); + } + + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); 
+ } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.class, + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.Builder + .class); + } + + public static final int OFFSET_FIELD_NUMBER = 1; + private com.google.protobuf.Int64Value offset_; + /** + * + * + *
+     * The row offset at which the last append occurred. The offset will not be
+     * set if appending using default streams.
+     * 
+ * + * .google.protobuf.Int64Value offset = 1; + * + * @return Whether the offset field is set. + */ + @java.lang.Override + public boolean hasOffset() { + return offset_ != null; + } + /** + * + * + *
+     * The row offset at which the last append occurred. The offset will not be
+     * set if appending using default streams.
+     * 
+ * + * .google.protobuf.Int64Value offset = 1; + * + * @return The offset. + */ + @java.lang.Override + public com.google.protobuf.Int64Value getOffset() { + return offset_ == null ? com.google.protobuf.Int64Value.getDefaultInstance() : offset_; + } + /** + * + * + *
+     * The row offset at which the last append occurred. The offset will not be
+     * set if appending using default streams.
+     * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + @java.lang.Override + public com.google.protobuf.Int64ValueOrBuilder getOffsetOrBuilder() { + return getOffset(); + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (offset_ != null) { + output.writeMessage(1, getOffset()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (offset_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getOffset()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj + instanceof com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult)) { + return super.equals(obj); + } + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult other = + (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) obj; + + if (hasOffset() != other.hasOffset()) return false; + if (hasOffset()) { + if (!getOffset().equals(other.getOffset())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasOffset()) { + hash = (37 * hash) + OFFSET_FIELD_NUMBER; + hash = (53 * hash) + getOffset().hashCode(); + } + hash = (29 * hash) + 
unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static 
com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+     * A success append result.
+     * 
+ * + * Protobuf type {@code google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult} + */ + public static final class Builder + extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.class, + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.Builder + .class); + } + + // Construct using + // com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + if (offsetBuilder_ == null) { + offset_ = null; + } else { + offset_ = null; + offsetBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + 
.internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + getDefaultInstanceForType() { + return com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult build() { + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult result = + buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + buildPartial() { + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult result = + new com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult(this); + if (offsetBuilder_ == null) { + result.offset_ = offset_; + } else { + result.offset_ = offsetBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, + java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other + instanceof com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) { + return mergeFrom( + (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom( + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult other) { + if (other + == com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance()) return this; + if (other.hasOffset()) { + mergeOffset(other.getOffset()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult parsedMessage = + null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private com.google.protobuf.Int64Value offset_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Int64Value, + com.google.protobuf.Int64Value.Builder, + com.google.protobuf.Int64ValueOrBuilder> + offsetBuilder_; + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + * + * @return Whether the offset field is set. + */ + public boolean hasOffset() { + return offsetBuilder_ != null || offset_ != null; + } + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + * + * @return The offset. + */ + public com.google.protobuf.Int64Value getOffset() { + if (offsetBuilder_ == null) { + return offset_ == null ? com.google.protobuf.Int64Value.getDefaultInstance() : offset_; + } else { + return offsetBuilder_.getMessage(); + } + } + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + public Builder setOffset(com.google.protobuf.Int64Value value) { + if (offsetBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + offset_ = value; + onChanged(); + } else { + offsetBuilder_.setMessage(value); + } + + return this; + } + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + public Builder setOffset(com.google.protobuf.Int64Value.Builder builderForValue) { + if (offsetBuilder_ == null) { + offset_ = builderForValue.build(); + onChanged(); + } else { + offsetBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + public Builder mergeOffset(com.google.protobuf.Int64Value value) { + if (offsetBuilder_ == null) { + if (offset_ != null) { + offset_ = + com.google.protobuf.Int64Value.newBuilder(offset_).mergeFrom(value).buildPartial(); + } else { + offset_ = value; + } + onChanged(); + } else { + offsetBuilder_.mergeFrom(value); + } + + return this; + } + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + public Builder clearOffset() { + if (offsetBuilder_ == null) { + offset_ = null; + onChanged(); + } else { + offset_ = null; + offsetBuilder_ = null; + } + + return this; + } + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + public com.google.protobuf.Int64Value.Builder getOffsetBuilder() { + + onChanged(); + return getOffsetFieldBuilder().getBuilder(); + } + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + public com.google.protobuf.Int64ValueOrBuilder getOffsetOrBuilder() { + if (offsetBuilder_ != null) { + return offsetBuilder_.getMessageOrBuilder(); + } else { + return offset_ == null ? com.google.protobuf.Int64Value.getDefaultInstance() : offset_; + } + } + /** + * + * + *
+       * The row offset at which the last append occurred. The offset will not be
+       * set if appending using default streams.
+       * 
+ * + * .google.protobuf.Int64Value offset = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Int64Value, + com.google.protobuf.Int64Value.Builder, + com.google.protobuf.Int64ValueOrBuilder> + getOffsetFieldBuilder() { + if (offsetBuilder_ == null) { + offsetBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Int64Value, + com.google.protobuf.Int64Value.Builder, + com.google.protobuf.Int64ValueOrBuilder>( + getOffset(), getParentForChildren(), isClean()); + offset_ = null; + } + return offsetBuilder_; + } + + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + private static final com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = + new com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult(); + } + + public static com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public AppendResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new AppendResult(input, extensionRegistry); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + } + private int responseCase_ = 0; private java.lang.Object response_; @@ -146,7 +954,7 @@ public enum ResponseCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { - OFFSET(1), + APPEND_RESULT(1), ERROR(2), RESPONSE_NOT_SET(0); private final int value; @@ -167,7 +975,7 @@ public static ResponseCase valueOf(int value) { public static ResponseCase forNumber(int value) { switch (value) { case 1: - return OFFSET; + return APPEND_RESULT; case 2: return ERROR; case 0: @@ -186,24 +994,62 @@ public ResponseCase getResponseCase() { return ResponseCase.forNumber(responseCase_); } - public static final int OFFSET_FIELD_NUMBER = 1; + public static final int APPEND_RESULT_FIELD_NUMBER = 1; + /** + * + * + *
+   * Result if the append is successful.
+   * 
+ * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + * + * @return Whether the appendResult field is set. + */ + @java.lang.Override + public boolean hasAppendResult() { + return responseCase_ == 1; + } /** * * *
-   * The row offset at which the last append occurred.
+   * Result if the append is successful.
    * 
* - * int64 offset = 1; + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * * - * @return The offset. + * @return The appendResult. */ @java.lang.Override - public long getOffset() { + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + getAppendResult() { if (responseCase_ == 1) { - return (java.lang.Long) response_; + return (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) response_; } - return 0L; + return com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance(); + } + /** + * + * + *
+   * Result if the append is successful.
+   * 
+ * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResultOrBuilder + getAppendResultOrBuilder() { + if (responseCase_ == 1) { + return (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) response_; + } + return com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance(); } public static final int ERROR_FIELD_NUMBER = 2; @@ -211,8 +1057,19 @@ public long getOffset() { * * *
-   * Error in case of append failure. If set, it means rows are not accepted
-   * into the system. Users can retry within the same connection.
+   * Error in case of request failed. If set, it means rows are not accepted
+   * into the system. Users can retry or continue with other requests within
+   * the same connection.
+   * ALREADY_EXISTS: happens when offset is specified, it means the entire
+   *   request is already appended, it is safe to ignore this error.
+   * OUT_OF_RANGE: happens when offset is specified, it means the specified
+   *   offset is beyond the end of the stream.
+   * INVALID_ARGUMENT: error caused by malformed request or data.
+   * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+   *   append without offset.
+   * ABORTED: request processing is aborted because of prior failures, request
+   *   can be retried if previous failure is fixed.
+   * INTERNAL: server side errors that can be retried.
    * 
* * .google.rpc.Status error = 2; @@ -227,8 +1084,19 @@ public boolean hasError() { * * *
-   * Error in case of append failure. If set, it means rows are not accepted
-   * into the system. Users can retry within the same connection.
+   * Error in case of request failed. If set, it means rows are not accepted
+   * into the system. Users can retry or continue with other requests within
+   * the same connection.
+   * ALREADY_EXISTS: happens when offset is specified, it means the entire
+   *   request is already appended, it is safe to ignore this error.
+   * OUT_OF_RANGE: happens when offset is specified, it means the specified
+   *   offset is beyond the end of the stream.
+   * INVALID_ARGUMENT: error caused by malformed request or data.
+   * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+   *   append without offset.
+   * ABORTED: request processing is aborted because of prior failures, request
+   *   can be retried if previous failure is fixed.
+   * INTERNAL: server side errors that can be retried.
    * 
* * .google.rpc.Status error = 2; @@ -246,8 +1114,19 @@ public com.google.rpc.Status getError() { * * *
-   * Error in case of append failure. If set, it means rows are not accepted
-   * into the system. Users can retry within the same connection.
+   * Error in case of request failed. If set, it means rows are not accepted
+   * into the system. Users can retry or continue with other requests within
+   * the same connection.
+   * ALREADY_EXISTS: happens when offset is specified, it means the entire
+   *   request is already appended, it is safe to ignore this error.
+   * OUT_OF_RANGE: happens when offset is specified, it means the specified
+   *   offset is beyond the end of the stream.
+   * INVALID_ARGUMENT: error caused by malformed request or data.
+   * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+   *   append without offset.
+   * ABORTED: request processing is aborted because of prior failures, request
+   *   can be retried if previous failure is fixed.
+   * INTERNAL: server side errors that can be retried.
    * 
* * .google.rpc.Status error = 2; @@ -330,7 +1209,8 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (responseCase_ == 1) { - output.writeInt64(1, (long) ((java.lang.Long) response_)); + output.writeMessage( + 1, (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) response_); } if (responseCase_ == 2) { output.writeMessage(2, (com.google.rpc.Status) response_); @@ -349,8 +1229,10 @@ public int getSerializedSize() { size = 0; if (responseCase_ == 1) { size += - com.google.protobuf.CodedOutputStream.computeInt64Size( - 1, (long) ((java.lang.Long) response_)); + com.google.protobuf.CodedOutputStream.computeMessageSize( + 1, + (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + response_); } if (responseCase_ == 2) { size += @@ -383,7 +1265,7 @@ public boolean equals(final java.lang.Object obj) { if (!getResponseCase().equals(other.getResponseCase())) return false; switch (responseCase_) { case 1: - if (getOffset() != other.getOffset()) return false; + if (!getAppendResult().equals(other.getAppendResult())) return false; break; case 2: if (!getError().equals(other.getError())) return false; @@ -408,8 +1290,8 @@ public int hashCode() { } switch (responseCase_) { case 1: - hash = (37 * hash) + OFFSET_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getOffset()); + hash = (37 * hash) + APPEND_RESULT_FIELD_NUMBER; + hash = (53 * hash) + getAppendResult().hashCode(); break; case 2: hash = (37 * hash) + ERROR_FIELD_NUMBER; @@ -601,7 +1483,11 @@ public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse buildPartial com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse result = new com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse(this); if (responseCase_ == 1) { - result.response_ = response_; + if (appendResultBuilder_ == null) { + result.response_ = response_; + 
} else { + result.response_ = appendResultBuilder_.build(); + } } if (responseCase_ == 2) { if (errorBuilder_ == null) { @@ -671,9 +1557,9 @@ public Builder mergeFrom(com.google.cloud.bigquery.storage.v1beta2.AppendRowsRes mergeUpdatedSchema(other.getUpdatedSchema()); } switch (other.getResponseCase()) { - case OFFSET: + case APPEND_RESULT: { - setOffset(other.getOffset()); + mergeAppendResult(other.getAppendResult()); break; } case ERROR: @@ -730,60 +1616,249 @@ public Builder clearResponse() { return this; } + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult, + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.Builder, + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResultOrBuilder> + appendResultBuilder_; /** * * *
-     * The row offset at which the last append occurred.
+     * Result if the append is successful.
      * 
* - * int64 offset = 1; + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * * - * @return The offset. + * @return Whether the appendResult field is set. */ - public long getOffset() { - if (responseCase_ == 1) { - return (java.lang.Long) response_; - } - return 0L; + @java.lang.Override + public boolean hasAppendResult() { + return responseCase_ == 1; } /** * * *
-     * The row offset at which the last append occurred.
+     * Result if the append is successful.
      * 
* - * int64 offset = 1; + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * * - * @param value The offset to set. - * @return This builder for chaining. + * @return The appendResult. */ - public Builder setOffset(long value) { + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + getAppendResult() { + if (appendResultBuilder_ == null) { + if (responseCase_ == 1) { + return (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + response_; + } + return com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance(); + } else { + if (responseCase_ == 1) { + return appendResultBuilder_.getMessage(); + } + return com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance(); + } + } + /** + * + * + *
+     * Result if the append is successful.
+     * 
+ * + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + */ + public Builder setAppendResult( + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult value) { + if (appendResultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + response_ = value; + onChanged(); + } else { + appendResultBuilder_.setMessage(value); + } responseCase_ = 1; - response_ = value; - onChanged(); return this; } /** * * *
-     * The row offset at which the last append occurred.
+     * Result if the append is successful.
      * 
* - * int64 offset = 1; + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + */ + public Builder setAppendResult( + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.Builder + builderForValue) { + if (appendResultBuilder_ == null) { + response_ = builderForValue.build(); + onChanged(); + } else { + appendResultBuilder_.setMessage(builderForValue.build()); + } + responseCase_ = 1; + return this; + } + /** + * * - * @return This builder for chaining. + *
+     * Result if the append is successful.
+     * 
+ * + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * */ - public Builder clearOffset() { - if (responseCase_ == 1) { - responseCase_ = 0; - response_ = null; + public Builder mergeAppendResult( + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult value) { + if (appendResultBuilder_ == null) { + if (responseCase_ == 1 + && response_ + != com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance()) { + response_ = + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.newBuilder( + (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + response_) + .mergeFrom(value) + .buildPartial(); + } else { + response_ = value; + } onChanged(); + } else { + if (responseCase_ == 1) { + appendResultBuilder_.mergeFrom(value); + } + appendResultBuilder_.setMessage(value); + } + responseCase_ = 1; + return this; + } + /** + * + * + *
+     * Result if the append is successful.
+     * 
+ * + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + */ + public Builder clearAppendResult() { + if (appendResultBuilder_ == null) { + if (responseCase_ == 1) { + responseCase_ = 0; + response_ = null; + onChanged(); + } + } else { + if (responseCase_ == 1) { + responseCase_ = 0; + response_ = null; + } + appendResultBuilder_.clear(); } return this; } + /** + * + * + *
+     * Result if the append is successful.
+     * 
+ * + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + */ + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.Builder + getAppendResultBuilder() { + return getAppendResultFieldBuilder().getBuilder(); + } + /** + * + * + *
+     * Result if the append is successful.
+     * 
+ * + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + */ + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResultOrBuilder + getAppendResultOrBuilder() { + if ((responseCase_ == 1) && (appendResultBuilder_ != null)) { + return appendResultBuilder_.getMessageOrBuilder(); + } else { + if (responseCase_ == 1) { + return (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + response_; + } + return com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance(); + } + } + /** + * + * + *
+     * Result if the append is successful.
+     * 
+ * + * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult, + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.Builder, + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResultOrBuilder> + getAppendResultFieldBuilder() { + if (appendResultBuilder_ == null) { + if (!(responseCase_ == 1)) { + response_ = + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult + .getDefaultInstance(); + } + appendResultBuilder_ = + new com.google.protobuf.SingleFieldBuilderV3< + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult, + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult.Builder, + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResultOrBuilder>( + (com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult) + response_, + getParentForChildren(), + isClean()); + response_ = null; + } + responseCase_ = 1; + onChanged(); + ; + return appendResultBuilder_; + } private com.google.protobuf.SingleFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> @@ -792,8 +1867,19 @@ public Builder clearOffset() { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failed. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   append without offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; @@ -808,8 +1894,19 @@ public boolean hasError() { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failed. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   append without offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; @@ -834,8 +1931,19 @@ public com.google.rpc.Status getError() { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failed. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   append without offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; @@ -857,8 +1965,19 @@ public Builder setError(com.google.rpc.Status value) { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failed. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   append without offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; @@ -877,8 +1996,19 @@ public Builder setError(com.google.rpc.Status.Builder builderForValue) { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failed. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   append without offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; @@ -907,8 +2037,19 @@ public Builder mergeError(com.google.rpc.Status value) { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failed. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   append without offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; @@ -933,8 +2074,19 @@ public Builder clearError() { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failure. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   appending without an offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; @@ -946,8 +2098,19 @@ public com.google.rpc.Status.Builder getErrorBuilder() { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failure. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   appending without an offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; @@ -967,8 +2130,19 @@ public com.google.rpc.StatusOrBuilder getErrorOrBuilder() { * * *
-     * Error in case of append failure. If set, it means rows are not accepted
-     * into the system. Users can retry within the same connection.
+     * Error in case of request failure. If set, it means rows are not accepted
+     * into the system. Users can retry or continue with other requests within
+     * the same connection.
+     * ALREADY_EXISTS: happens when offset is specified, it means the entire
+     *   request is already appended, it is safe to ignore this error.
+     * OUT_OF_RANGE: happens when offset is specified, it means the specified
+     *   offset is beyond the end of the stream.
+     * INVALID_ARGUMENT: error caused by malformed request or data.
+     * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+     *   appending without an offset.
+     * ABORTED: request processing is aborted because of prior failures, request
+     *   can be retried if previous failure is fixed.
+     * INTERNAL: server side errors that can be retried.
      * 
* * .google.rpc.Status error = 2; diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsResponseOrBuilder.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsResponseOrBuilder.java index 9711bc2758..fe320fb6c5 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsResponseOrBuilder.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/AppendRowsResponseOrBuilder.java @@ -27,21 +27,58 @@ public interface AppendRowsResponseOrBuilder * * *
-   * The row offset at which the last append occurred.
+   * Result if the append is successful.
    * 
* - * int64 offset = 1; + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * * - * @return The offset. + * @return Whether the appendResult field is set. */ - long getOffset(); + boolean hasAppendResult(); + /** + * + * + *
+   * Result if the append is successful.
+   * 
+ * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + * + * @return The appendResult. + */ + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult getAppendResult(); + /** + * + * + *
+   * Result if the append is successful.
+   * 
+ * + * .google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult append_result = 1; + * + */ + com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResultOrBuilder + getAppendResultOrBuilder(); /** * * *
-   * Error in case of append failure. If set, it means rows are not accepted
-   * into the system. Users can retry within the same connection.
+   * Error in case of request failure. If set, it means rows are not accepted
+   * into the system. Users can retry or continue with other requests within
+   * the same connection.
+   * ALREADY_EXISTS: happens when offset is specified, it means the entire
+   *   request is already appended, it is safe to ignore this error.
+   * OUT_OF_RANGE: happens when offset is specified, it means the specified
+   *   offset is beyond the end of the stream.
+   * INVALID_ARGUMENT: error caused by malformed request or data.
+   * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+   *   appending without an offset.
+   * ABORTED: request processing is aborted because of prior failures, request
+   *   can be retried if previous failure is fixed.
+   * INTERNAL: server side errors that can be retried.
    * 
* * .google.rpc.Status error = 2; @@ -53,8 +90,19 @@ public interface AppendRowsResponseOrBuilder * * *
-   * Error in case of append failure. If set, it means rows are not accepted
-   * into the system. Users can retry within the same connection.
+   * Error in case of request failure. If set, it means rows are not accepted
+   * into the system. Users can retry or continue with other requests within
+   * the same connection.
+   * ALREADY_EXISTS: happens when offset is specified, it means the entire
+   *   request is already appended, it is safe to ignore this error.
+   * OUT_OF_RANGE: happens when offset is specified, it means the specified
+   *   offset is beyond the end of the stream.
+   * INVALID_ARGUMENT: error caused by malformed request or data.
+   * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+   *   appending without an offset.
+   * ABORTED: request processing is aborted because of prior failures, request
+   *   can be retried if previous failure is fixed.
+   * INTERNAL: server side errors that can be retried.
    * 
* * .google.rpc.Status error = 2; @@ -66,8 +114,19 @@ public interface AppendRowsResponseOrBuilder * * *
-   * Error in case of append failure. If set, it means rows are not accepted
-   * into the system. Users can retry within the same connection.
+   * Error in case of request failure. If set, it means rows are not accepted
+   * into the system. Users can retry or continue with other requests within
+   * the same connection.
+   * ALREADY_EXISTS: happens when offset is specified, it means the entire
+   *   request is already appended, it is safe to ignore this error.
+   * OUT_OF_RANGE: happens when offset is specified, it means the specified
+   *   offset is beyond the end of the stream.
+   * INVALID_ARGUMENT: error caused by malformed request or data.
+   * RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
+   *   appending without an offset.
+   * ABORTED: request processing is aborted because of prior failures, request
+   *   can be retried if previous failure is fixed.
+   * INTERNAL: server side errors that can be retried.
    * 
* * .google.rpc.Status error = 2; diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BatchCommitWriteStreamsResponse.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BatchCommitWriteStreamsResponse.java index 1a43072756..0fa6c5e7c5 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BatchCommitWriteStreamsResponse.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BatchCommitWriteStreamsResponse.java @@ -38,7 +38,9 @@ private BatchCommitWriteStreamsResponse( super(builder); } - private BatchCommitWriteStreamsResponse() {} + private BatchCommitWriteStreamsResponse() { + streamErrors_ = java.util.Collections.emptyList(); + } @java.lang.Override @SuppressWarnings({"unused"}) @@ -59,6 +61,7 @@ private BatchCommitWriteStreamsResponse( if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } + int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { @@ -84,6 +87,20 @@ private BatchCommitWriteStreamsResponse( break; } + case 18: + { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + streamErrors_ = + new java.util.ArrayList< + com.google.cloud.bigquery.storage.v1beta2.StorageError>(); + mutable_bitField0_ |= 0x00000001; + } + streamErrors_.add( + input.readMessage( + com.google.cloud.bigquery.storage.v1beta2.StorageError.parser(), + extensionRegistry)); + break; + } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { @@ -98,6 +115,9 @@ private BatchCommitWriteStreamsResponse( } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + streamErrors_ = 
java.util.Collections.unmodifiableList(streamErrors_); + } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } @@ -126,6 +146,7 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { * *
    * The time at which streams were committed in microseconds granularity.
+   * This field will only exist when there are no stream errors.
    * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -141,6 +162,7 @@ public boolean hasCommitTime() { * *
    * The time at which streams were committed in microseconds granularity.
+   * This field will only exist when there are no stream errors.
    * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -156,6 +178,7 @@ public com.google.protobuf.Timestamp getCommitTime() { * *
    * The time at which streams were committed in microseconds granularity.
+   * This field will only exist when there are no stream errors.
    * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -165,6 +188,82 @@ public com.google.protobuf.TimestampOrBuilder getCommitTimeOrBuilder() { return getCommitTime(); } + public static final int STREAM_ERRORS_FIELD_NUMBER = 2; + private java.util.List streamErrors_; + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + @java.lang.Override + public java.util.List + getStreamErrorsList() { + return streamErrors_; + } + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + @java.lang.Override + public java.util.List + getStreamErrorsOrBuilderList() { + return streamErrors_; + } + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + @java.lang.Override + public int getStreamErrorsCount() { + return streamErrors_.size(); + } + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.StorageError getStreamErrors(int index) { + return streamErrors_.get(index); + } + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.StorageErrorOrBuilder getStreamErrorsOrBuilder( + int index) { + return streamErrors_.get(index); + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -182,6 +281,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (commitTime_ != null) { output.writeMessage(1, getCommitTime()); } + for (int i = 0; i < streamErrors_.size(); i++) { + output.writeMessage(2, streamErrors_.get(i)); + } unknownFields.writeTo(output); } @@ -194,6 +296,9 @@ public int getSerializedSize() { if (commitTime_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCommitTime()); } + for (int i = 0; i < streamErrors_.size(); i++) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, streamErrors_.get(i)); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -215,6 +320,7 @@ public boolean equals(final java.lang.Object obj) { if (hasCommitTime()) { if (!getCommitTime().equals(other.getCommitTime())) return false; } + if (!getStreamErrorsList().equals(other.getStreamErrorsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -230,6 +336,10 @@ public int hashCode() { hash = (37 * hash) + COMMIT_TIME_FIELD_NUMBER; hash = (53 * hash) + getCommitTime().hashCode(); } + if (getStreamErrorsCount() > 0) { + hash = (37 * hash) + STREAM_ERRORS_FIELD_NUMBER; + hash = (53 * hash) + getStreamErrorsList().hashCode(); + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -373,7 +483,9 @@ private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + getStreamErrorsFieldBuilder(); + } } @java.lang.Override @@ -385,6 +497,12 @@ public Builder clear() { commitTime_ = null; commitTimeBuilder_ = null; } + if (streamErrorsBuilder_ == null) { + streamErrors_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + streamErrorsBuilder_.clear(); + } return this; } @@ -416,11 +534,21 @@ public com.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse buildPartial() { com.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse result = new com.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse(this); + int from_bitField0_ = bitField0_; if (commitTimeBuilder_ == null) { result.commitTime_ = commitTime_; } else { result.commitTime_ = commitTimeBuilder_.build(); } + if (streamErrorsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + streamErrors_ = java.util.Collections.unmodifiableList(streamErrors_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.streamErrors_ = streamErrors_; + } else { + result.streamErrors_ = streamErrorsBuilder_.build(); + } onBuilt(); return result; } @@ -478,6 +606,33 @@ public Builder mergeFrom( if (other.hasCommitTime()) { mergeCommitTime(other.getCommitTime()); } + if (streamErrorsBuilder_ == null) { + if (!other.streamErrors_.isEmpty()) { + if (streamErrors_.isEmpty()) { + streamErrors_ = other.streamErrors_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureStreamErrorsIsMutable(); + streamErrors_.addAll(other.streamErrors_); + } + onChanged(); + } + } else { + if (!other.streamErrors_.isEmpty()) { + if (streamErrorsBuilder_.isEmpty()) { + streamErrorsBuilder_.dispose(); + streamErrorsBuilder_ = null; + streamErrors_ = other.streamErrors_; + bitField0_ = (bitField0_ & ~0x00000001); + streamErrorsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders + ? 
getStreamErrorsFieldBuilder() + : null; + } else { + streamErrorsBuilder_.addAllMessages(other.streamErrors_); + } + } + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -510,6 +665,8 @@ public Builder mergeFrom( return this; } + private int bitField0_; + private com.google.protobuf.Timestamp commitTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, @@ -521,6 +678,7 @@ public Builder mergeFrom( * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -535,6 +693,7 @@ public boolean hasCommitTime() { * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -555,6 +714,7 @@ public com.google.protobuf.Timestamp getCommitTime() { * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -577,6 +737,7 @@ public Builder setCommitTime(com.google.protobuf.Timestamp value) { * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -596,6 +757,7 @@ public Builder setCommitTime(com.google.protobuf.Timestamp.Builder builderForVal * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -620,6 +782,7 @@ public Builder mergeCommitTime(com.google.protobuf.Timestamp value) { * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -640,6 +803,7 @@ public Builder clearCommitTime() { * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -654,6 +818,7 @@ public com.google.protobuf.Timestamp.Builder getCommitTimeBuilder() { * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -672,6 +837,7 @@ public com.google.protobuf.TimestampOrBuilder getCommitTimeOrBuilder() { * *
      * The time at which streams were committed in microseconds granularity.
+     * This field will only exist when there are no stream errors.
      * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -693,6 +859,384 @@ public com.google.protobuf.TimestampOrBuilder getCommitTimeOrBuilder() { return commitTimeBuilder_; } + private java.util.List streamErrors_ = + java.util.Collections.emptyList(); + + private void ensureStreamErrorsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + streamErrors_ = + new java.util.ArrayList( + streamErrors_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.storage.v1beta2.StorageError, + com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder, + com.google.cloud.bigquery.storage.v1beta2.StorageErrorOrBuilder> + streamErrorsBuilder_; + + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public java.util.List + getStreamErrorsList() { + if (streamErrorsBuilder_ == null) { + return java.util.Collections.unmodifiableList(streamErrors_); + } else { + return streamErrorsBuilder_.getMessageList(); + } + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public int getStreamErrorsCount() { + if (streamErrorsBuilder_ == null) { + return streamErrors_.size(); + } else { + return streamErrorsBuilder_.getCount(); + } + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public com.google.cloud.bigquery.storage.v1beta2.StorageError getStreamErrors(int index) { + if (streamErrorsBuilder_ == null) { + return streamErrors_.get(index); + } else { + return streamErrorsBuilder_.getMessage(index); + } + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder setStreamErrors( + int index, com.google.cloud.bigquery.storage.v1beta2.StorageError value) { + if (streamErrorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureStreamErrorsIsMutable(); + streamErrors_.set(index, value); + onChanged(); + } else { + streamErrorsBuilder_.setMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder setStreamErrors( + int index, com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder builderForValue) { + if (streamErrorsBuilder_ == null) { + ensureStreamErrorsIsMutable(); + streamErrors_.set(index, builderForValue.build()); + onChanged(); + } else { + streamErrorsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder addStreamErrors(com.google.cloud.bigquery.storage.v1beta2.StorageError value) { + if (streamErrorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureStreamErrorsIsMutable(); + streamErrors_.add(value); + onChanged(); + } else { + streamErrorsBuilder_.addMessage(value); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder addStreamErrors( + int index, com.google.cloud.bigquery.storage.v1beta2.StorageError value) { + if (streamErrorsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureStreamErrorsIsMutable(); + streamErrors_.add(index, value); + onChanged(); + } else { + streamErrorsBuilder_.addMessage(index, value); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder addStreamErrors( + com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder builderForValue) { + if (streamErrorsBuilder_ == null) { + ensureStreamErrorsIsMutable(); + streamErrors_.add(builderForValue.build()); + onChanged(); + } else { + streamErrorsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder addStreamErrors( + int index, com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder builderForValue) { + if (streamErrorsBuilder_ == null) { + ensureStreamErrorsIsMutable(); + streamErrors_.add(index, builderForValue.build()); + onChanged(); + } else { + streamErrorsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder addAllStreamErrors( + java.lang.Iterable + values) { + if (streamErrorsBuilder_ == null) { + ensureStreamErrorsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, streamErrors_); + onChanged(); + } else { + streamErrorsBuilder_.addAllMessages(values); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder clearStreamErrors() { + if (streamErrorsBuilder_ == null) { + streamErrors_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + streamErrorsBuilder_.clear(); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public Builder removeStreamErrors(int index) { + if (streamErrorsBuilder_ == null) { + ensureStreamErrorsIsMutable(); + streamErrors_.remove(index); + onChanged(); + } else { + streamErrorsBuilder_.remove(index); + } + return this; + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder getStreamErrorsBuilder( + int index) { + return getStreamErrorsFieldBuilder().getBuilder(index); + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public com.google.cloud.bigquery.storage.v1beta2.StorageErrorOrBuilder getStreamErrorsOrBuilder( + int index) { + if (streamErrorsBuilder_ == null) { + return streamErrors_.get(index); + } else { + return streamErrorsBuilder_.getMessageOrBuilder(index); + } + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public java.util.List + getStreamErrorsOrBuilderList() { + if (streamErrorsBuilder_ != null) { + return streamErrorsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(streamErrors_); + } + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder addStreamErrorsBuilder() { + return getStreamErrorsFieldBuilder() + .addBuilder(com.google.cloud.bigquery.storage.v1beta2.StorageError.getDefaultInstance()); + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder addStreamErrorsBuilder( + int index) { + return getStreamErrorsFieldBuilder() + .addBuilder( + index, com.google.cloud.bigquery.storage.v1beta2.StorageError.getDefaultInstance()); + } + /** + * + * + *
+     * Stream level error if commit failed. Only streams with error will be in
+     * the list.
+     * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + public java.util.List + getStreamErrorsBuilderList() { + return getStreamErrorsFieldBuilder().getBuilderList(); + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.storage.v1beta2.StorageError, + com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder, + com.google.cloud.bigquery.storage.v1beta2.StorageErrorOrBuilder> + getStreamErrorsFieldBuilder() { + if (streamErrorsBuilder_ == null) { + streamErrorsBuilder_ = + new com.google.protobuf.RepeatedFieldBuilderV3< + com.google.cloud.bigquery.storage.v1beta2.StorageError, + com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder, + com.google.cloud.bigquery.storage.v1beta2.StorageErrorOrBuilder>( + streamErrors_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); + streamErrors_ = null; + } + return streamErrorsBuilder_; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BatchCommitWriteStreamsResponseOrBuilder.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BatchCommitWriteStreamsResponseOrBuilder.java index 86b396822b..77dea40ef7 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BatchCommitWriteStreamsResponseOrBuilder.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BatchCommitWriteStreamsResponseOrBuilder.java @@ -28,6 +28,7 @@ public interface BatchCommitWriteStreamsResponseOrBuilder * *
    * The time at which streams were committed in microseconds granularity.
+   * This field will only exist when there are no stream errors.
    * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -40,6 +41,7 @@ public interface BatchCommitWriteStreamsResponseOrBuilder * *
    * The time at which streams were committed in microseconds granularity.
+   * This field will only exist when there are no stream errors.
    * 
* * .google.protobuf.Timestamp commit_time = 1; @@ -52,9 +54,68 @@ public interface BatchCommitWriteStreamsResponseOrBuilder * *
    * The time at which streams were committed in microseconds granularity.
+   * This field will only exist when there are no stream errors.
    * 
* * .google.protobuf.Timestamp commit_time = 1; */ com.google.protobuf.TimestampOrBuilder getCommitTimeOrBuilder(); + + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + java.util.List getStreamErrorsList(); + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + com.google.cloud.bigquery.storage.v1beta2.StorageError getStreamErrors(int index); + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + int getStreamErrorsCount(); + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + java.util.List + getStreamErrorsOrBuilderList(); + /** + * + * + *
+   * Stream level error if commit failed. Only streams with error will be in
+   * the list.
+   * 
+ * + * repeated .google.cloud.bigquery.storage.v1beta2.StorageError stream_errors = 2; + */ + com.google.cloud.bigquery.storage.v1beta2.StorageErrorOrBuilder getStreamErrorsOrBuilder( + int index); } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageError.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageError.java new file mode 100644 index 0000000000..11e197c11f --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageError.java @@ -0,0 +1,1214 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/storage/v1beta2/storage.proto + +package com.google.cloud.bigquery.storage.v1beta2; + +/** + * + * + *
+ * Structured custom BigQuery Storage error message. The error can be attached
+ * as error details in the returned rpc Status. User can use the info to process
+ * errors in a structural way, rather than having to parse error messages.
+ * 
+ * + * Protobuf type {@code google.cloud.bigquery.storage.v1beta2.StorageError} + */ +public final class StorageError extends com.google.protobuf.GeneratedMessageV3 + implements + // @@protoc_insertion_point(message_implements:google.cloud.bigquery.storage.v1beta2.StorageError) + StorageErrorOrBuilder { + private static final long serialVersionUID = 0L; + // Use StorageError.newBuilder() to construct. + private StorageError(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + + private StorageError() { + code_ = 0; + entity_ = ""; + errorMessage_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { + return new StorageError(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } + + private StorageError( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: + { + int rawValue = input.readEnum(); + + code_ = rawValue; + break; + } + case 18: + { + java.lang.String s = input.readStringRequireUtf8(); + + entity_ = s; + break; + } + case 26: + { + java.lang.String s = input.readStringRequireUtf8(); + + errorMessage_ = s; + break; + } + default: + { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.storage.v1beta2.StorageError.class, + com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder.class); + } + + /** + * + * + *
+   * Error code for `StorageError`.
+   * 
+ * + * Protobuf enum {@code google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode} + */ + public enum StorageErrorCode implements com.google.protobuf.ProtocolMessageEnum { + /** + * + * + *
+     * Default error.
+     * 
+ * + * STORAGE_ERROR_CODE_UNSPECIFIED = 0; + */ + STORAGE_ERROR_CODE_UNSPECIFIED(0), + /** + * + * + *
+     * Table is not found in the system.
+     * 
+ * + * TABLE_NOT_FOUND = 1; + */ + TABLE_NOT_FOUND(1), + /** + * + * + *
+     * Stream is already committed.
+     * 
+ * + * STREAM_ALREADY_COMMITTED = 2; + */ + STREAM_ALREADY_COMMITTED(2), + /** + * + * + *
+     * Stream is not found.
+     * 
+ * + * STREAM_NOT_FOUND = 3; + */ + STREAM_NOT_FOUND(3), + /** + * + * + *
+     * Invalid Stream type.
+     * For example, you try to commit a stream that is not pending.
+     * 
+ * + * INVALID_STREAM_TYPE = 4; + */ + INVALID_STREAM_TYPE(4), + /** + * + * + *
+     * Invalid Stream state.
+     * For example, you try to commit a stream that is not finalized or is
+     * garbage collected.
+     * 
+ * + * INVALID_STREAM_STATE = 5; + */ + INVALID_STREAM_STATE(5), + UNRECOGNIZED(-1), + ; + + /** + * + * + *
+     * Default error.
+     * 
+ * + * STORAGE_ERROR_CODE_UNSPECIFIED = 0; + */ + public static final int STORAGE_ERROR_CODE_UNSPECIFIED_VALUE = 0; + /** + * + * + *
+     * Table is not found in the system.
+     * 
+ * + * TABLE_NOT_FOUND = 1; + */ + public static final int TABLE_NOT_FOUND_VALUE = 1; + /** + * + * + *
+     * Stream is already committed.
+     * 
+ * + * STREAM_ALREADY_COMMITTED = 2; + */ + public static final int STREAM_ALREADY_COMMITTED_VALUE = 2; + /** + * + * + *
+     * Stream is not found.
+     * 
+ * + * STREAM_NOT_FOUND = 3; + */ + public static final int STREAM_NOT_FOUND_VALUE = 3; + /** + * + * + *
+     * Invalid Stream type.
+     * For example, you try to commit a stream that is not pending.
+     * 
+ * + * INVALID_STREAM_TYPE = 4; + */ + public static final int INVALID_STREAM_TYPE_VALUE = 4; + /** + * + * + *
+     * Invalid Stream state.
+     * For example, you try to commit a stream that is not finalized or is
+     * garbage collected.
+     * 
+ * + * INVALID_STREAM_STATE = 5; + */ + public static final int INVALID_STREAM_STATE_VALUE = 5; + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static StorageErrorCode valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static StorageErrorCode forNumber(int value) { + switch (value) { + case 0: + return STORAGE_ERROR_CODE_UNSPECIFIED; + case 1: + return TABLE_NOT_FOUND; + case 2: + return STREAM_ALREADY_COMMITTED; + case 3: + return STREAM_NOT_FOUND; + case 4: + return INVALID_STREAM_TYPE; + case 5: + return INVALID_STREAM_STATE; + default: + return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { + return internalValueMap; + } + + private static final com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public StorageErrorCode findValueByNumber(int number) { + return StorageErrorCode.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + + public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { + return getDescriptor(); + } + + public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { + return 
com.google.cloud.bigquery.storage.v1beta2.StorageError.getDescriptor() + .getEnumTypes() + .get(0); + } + + private static final StorageErrorCode[] VALUES = values(); + + public static StorageErrorCode valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private StorageErrorCode(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode) + } + + public static final int CODE_FIELD_NUMBER = 1; + private int code_; + /** + * + * + *
+   * BigQuery Storage specific error code.
+   * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @return The enum numeric value on the wire for code. + */ + @java.lang.Override + public int getCodeValue() { + return code_; + } + /** + * + * + *
+   * BigQuery Storage specific error code.
+   * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @return The code. + */ + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode getCode() { + @SuppressWarnings("deprecation") + com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode result = + com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode.valueOf(code_); + return result == null + ? com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode.UNRECOGNIZED + : result; + } + + public static final int ENTITY_FIELD_NUMBER = 2; + private volatile java.lang.Object entity_; + /** + * + * + *
+   * Name of the failed entity.
+   * 
+ * + * string entity = 2; + * + * @return The entity. + */ + @java.lang.Override + public java.lang.String getEntity() { + java.lang.Object ref = entity_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + entity_ = s; + return s; + } + } + /** + * + * + *
+   * Name of the failed entity.
+   * 
+ * + * string entity = 2; + * + * @return The bytes for entity. + */ + @java.lang.Override + public com.google.protobuf.ByteString getEntityBytes() { + java.lang.Object ref = entity_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + entity_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ERROR_MESSAGE_FIELD_NUMBER = 3; + private volatile java.lang.Object errorMessage_; + /** + * + * + *
+   * Message that describes the error.
+   * 
+ * + * string error_message = 3; + * + * @return The errorMessage. + */ + @java.lang.Override + public java.lang.String getErrorMessage() { + java.lang.Object ref = errorMessage_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + errorMessage_ = s; + return s; + } + } + /** + * + * + *
+   * Message that describes the error.
+   * 
+ * + * string error_message = 3; + * + * @return The bytes for errorMessage. + */ + @java.lang.Override + public com.google.protobuf.ByteString getErrorMessageBytes() { + java.lang.Object ref = errorMessage_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + errorMessage_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (code_ + != com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode + .STORAGE_ERROR_CODE_UNSPECIFIED + .getNumber()) { + output.writeEnum(1, code_); + } + if (!getEntityBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, entity_); + } + if (!getErrorMessageBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, errorMessage_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (code_ + != com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode + .STORAGE_ERROR_CODE_UNSPECIFIED + .getNumber()) { + size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, code_); + } + if (!getEntityBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, entity_); + } + if (!getErrorMessageBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, errorMessage_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return 
size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof com.google.cloud.bigquery.storage.v1beta2.StorageError)) { + return super.equals(obj); + } + com.google.cloud.bigquery.storage.v1beta2.StorageError other = + (com.google.cloud.bigquery.storage.v1beta2.StorageError) obj; + + if (code_ != other.code_) return false; + if (!getEntity().equals(other.getEntity())) return false; + if (!getErrorMessage().equals(other.getErrorMessage())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + CODE_FIELD_NUMBER; + hash = (53 * hash) + code_; + hash = (37 * hash) + ENTITY_FIELD_NUMBER; + hash = (53 * hash) + getEntity().hashCode(); + hash = (37 * hash) + ERROR_MESSAGE_FIELD_NUMBER; + hash = (53 * hash) + getErrorMessage().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + 
com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseDelimitedFrom( + java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseDelimitedFrom( + java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( + PARSER, input, extensionRegistry); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException( + PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder( + com.google.cloud.bigquery.storage.v1beta2.StorageError prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * + * + *
+   * Structured custom BigQuery Storage error message. The error can be attached
+   * as error details in the returned rpc Status. User can use the info to process
+   * errors in a structural way, rather than having to parse error messages.
+   * 
+ * + * Protobuf type {@code google.cloud.bigquery.storage.v1beta2.StorageError} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.storage.v1beta2.StorageError) + com.google.cloud.bigquery.storage.v1beta2.StorageErrorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_fieldAccessorTable + .ensureFieldAccessorsInitialized( + com.google.cloud.bigquery.storage.v1beta2.StorageError.class, + com.google.cloud.bigquery.storage.v1beta2.StorageError.Builder.class); + } + + // Construct using com.google.cloud.bigquery.storage.v1beta2.StorageError.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} + } + + @java.lang.Override + public Builder clear() { + super.clear(); + code_ = 0; + + entity_ = ""; + + errorMessage_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return com.google.cloud.bigquery.storage.v1beta2.StorageProto + .internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_descriptor; + } + + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.StorageError getDefaultInstanceForType() { + return 
com.google.cloud.bigquery.storage.v1beta2.StorageError.getDefaultInstance(); + } + + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.StorageError build() { + com.google.cloud.bigquery.storage.v1beta2.StorageError result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.StorageError buildPartial() { + com.google.cloud.bigquery.storage.v1beta2.StorageError result = + new com.google.cloud.bigquery.storage.v1beta2.StorageError(this); + result.code_ = code_; + result.entity_ = entity_; + result.errorMessage_ = errorMessage_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.setField(field, value); + } + + @java.lang.Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @java.lang.Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { + return super.addRepeatedField(field, value); + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof com.google.cloud.bigquery.storage.v1beta2.StorageError) { + return mergeFrom((com.google.cloud.bigquery.storage.v1beta2.StorageError) other); + } else { + super.mergeFrom(other); + return this; + } + 
} + + public Builder mergeFrom(com.google.cloud.bigquery.storage.v1beta2.StorageError other) { + if (other == com.google.cloud.bigquery.storage.v1beta2.StorageError.getDefaultInstance()) + return this; + if (other.code_ != 0) { + setCodeValue(other.getCodeValue()); + } + if (!other.getEntity().isEmpty()) { + entity_ = other.entity_; + onChanged(); + } + if (!other.getErrorMessage().isEmpty()) { + errorMessage_ = other.errorMessage_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.cloud.bigquery.storage.v1beta2.StorageError parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = + (com.google.cloud.bigquery.storage.v1beta2.StorageError) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int code_ = 0; + /** + * + * + *
+     * BigQuery Storage specific error code.
+     * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @return The enum numeric value on the wire for code. + */ + @java.lang.Override + public int getCodeValue() { + return code_; + } + /** + * + * + *
+     * BigQuery Storage specific error code.
+     * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @param value The enum numeric value on the wire for code to set. + * @return This builder for chaining. + */ + public Builder setCodeValue(int value) { + + code_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * BigQuery Storage specific error code.
+     * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @return The code. + */ + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode getCode() { + @SuppressWarnings("deprecation") + com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode result = + com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode.valueOf(code_); + return result == null + ? com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode.UNRECOGNIZED + : result; + } + /** + * + * + *
+     * BigQuery Storage specific error code.
+     * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @param value The code to set. + * @return This builder for chaining. + */ + public Builder setCode( + com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode value) { + if (value == null) { + throw new NullPointerException(); + } + + code_ = value.getNumber(); + onChanged(); + return this; + } + /** + * + * + *
+     * BigQuery Storage specific error code.
+     * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @return This builder for chaining. + */ + public Builder clearCode() { + + code_ = 0; + onChanged(); + return this; + } + + private java.lang.Object entity_ = ""; + /** + * + * + *
+     * Name of the failed entity.
+     * 
+ * + * string entity = 2; + * + * @return The entity. + */ + public java.lang.String getEntity() { + java.lang.Object ref = entity_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + entity_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Name of the failed entity.
+     * 
+ * + * string entity = 2; + * + * @return The bytes for entity. + */ + public com.google.protobuf.ByteString getEntityBytes() { + java.lang.Object ref = entity_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + entity_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Name of the failed entity.
+     * 
+ * + * string entity = 2; + * + * @param value The entity to set. + * @return This builder for chaining. + */ + public Builder setEntity(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + entity_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Name of the failed entity.
+     * 
+ * + * string entity = 2; + * + * @return This builder for chaining. + */ + public Builder clearEntity() { + + entity_ = getDefaultInstance().getEntity(); + onChanged(); + return this; + } + /** + * + * + *
+     * Name of the failed entity.
+     * 
+ * + * string entity = 2; + * + * @param value The bytes for entity to set. + * @return This builder for chaining. + */ + public Builder setEntityBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + entity_ = value; + onChanged(); + return this; + } + + private java.lang.Object errorMessage_ = ""; + /** + * + * + *
+     * Message that describes the error.
+     * 
+ * + * string error_message = 3; + * + * @return The errorMessage. + */ + public java.lang.String getErrorMessage() { + java.lang.Object ref = errorMessage_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + errorMessage_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * + * + *
+     * Message that describes the error.
+     * 
+ * + * string error_message = 3; + * + * @return The bytes for errorMessage. + */ + public com.google.protobuf.ByteString getErrorMessageBytes() { + java.lang.Object ref = errorMessage_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + errorMessage_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * + * + *
+     * Message that describes the error.
+     * 
+ * + * string error_message = 3; + * + * @param value The errorMessage to set. + * @return This builder for chaining. + */ + public Builder setErrorMessage(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + errorMessage_ = value; + onChanged(); + return this; + } + /** + * + * + *
+     * Message that describes the error.
+     * 
+ * + * string error_message = 3; + * + * @return This builder for chaining. + */ + public Builder clearErrorMessage() { + + errorMessage_ = getDefaultInstance().getErrorMessage(); + onChanged(); + return this; + } + /** + * + * + *
+     * Message that describes the error.
+     * 
+ * + * string error_message = 3; + * + * @param value The bytes for errorMessage to set. + * @return This builder for chaining. + */ + public Builder setErrorMessageBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + errorMessage_ = value; + onChanged(); + return this; + } + + @java.lang.Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.storage.v1beta2.StorageError) + } + + // @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta2.StorageError) + private static final com.google.cloud.bigquery.storage.v1beta2.StorageError DEFAULT_INSTANCE; + + static { + DEFAULT_INSTANCE = new com.google.cloud.bigquery.storage.v1beta2.StorageError(); + } + + public static com.google.cloud.bigquery.storage.v1beta2.StorageError getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + @java.lang.Override + public StorageError parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new StorageError(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public com.google.cloud.bigquery.storage.v1beta2.StorageError getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } +} diff --git 
a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageErrorOrBuilder.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageErrorOrBuilder.java new file mode 100644 index 0000000000..24f7a8eb5e --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageErrorOrBuilder.java @@ -0,0 +1,100 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/cloud/bigquery/storage/v1beta2/storage.proto + +package com.google.cloud.bigquery.storage.v1beta2; + +public interface StorageErrorOrBuilder + extends + // @@protoc_insertion_point(interface_extends:google.cloud.bigquery.storage.v1beta2.StorageError) + com.google.protobuf.MessageOrBuilder { + + /** + * + * + *
+   * BigQuery Storage specific error code.
+   * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @return The enum numeric value on the wire for code. + */ + int getCodeValue(); + /** + * + * + *
+   * BigQuery Storage specific error code.
+   * 
+ * + * .google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode code = 1; + * + * @return The code. + */ + com.google.cloud.bigquery.storage.v1beta2.StorageError.StorageErrorCode getCode(); + + /** + * + * + *
+   * Name of the failed entity.
+   * 
+ * + * string entity = 2; + * + * @return The entity. + */ + java.lang.String getEntity(); + /** + * + * + *
+   * Name of the failed entity.
+   * 
+ * + * string entity = 2; + * + * @return The bytes for entity. + */ + com.google.protobuf.ByteString getEntityBytes(); + + /** + * + * + *
+   * Message that describes the error.
+   * 
+ * + * string error_message = 3; + * + * @return The errorMessage. + */ + java.lang.String getErrorMessage(); + /** + * + * + *
+   * Message that describes the error.
+   * 
+ * + * string error_message = 3; + * + * @return The bytes for errorMessage. + */ + com.google.protobuf.ByteString getErrorMessageBytes(); +} diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageProto.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageProto.java index 42d321dd05..1ed9b034de 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageProto.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageProto.java @@ -75,6 +75,10 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_bigquery_storage_v1beta2_GetWriteStreamRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable @@ -103,6 +107,10 @@ public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry r internal_static_google_cloud_bigquery_storage_v1beta2_FlushRowsResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_bigquery_storage_v1beta2_FlushRowsResponse_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor + 
internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -158,101 +166,114 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "teStreamRequest\0225\n\006parent\030\001 \001(\tB%\340A\002\372A\037\n" + "\035bigquery.googleapis.com/Table\022M\n\014write_" + "stream\030\002 \001(\01322.google.cloud.bigquery.sto" - + "rage.v1beta2.WriteStreamB\003\340A\002\"\244\003\n\021Append" + + "rage.v1beta2.WriteStreamB\003\340A\002\"\227\003\n\021Append" + "RowsRequest\022H\n\014write_stream\030\001 \001(\tB2\340A\002\372A" + ",\n*bigquerystorage.googleapis.com/WriteS" + "tream\022+\n\006offset\030\002 \001(\0132\033.google.protobuf." + "Int64Value\022X\n\nproto_rows\030\004 \001(\0132B.google." 
+ "cloud.bigquery.storage.v1beta2.AppendRow" - + "sRequest.ProtoDataH\000\022\035\n\025ignore_unknown_f" - + "ields\030\005 \001(\010\032\226\001\n\tProtoData\022I\n\rwriter_sche" - + "ma\030\001 \001(\01322.google.cloud.bigquery.storage" - + ".v1beta2.ProtoSchema\022>\n\004rows\030\002 \001(\01320.goo" - + "gle.cloud.bigquery.storage.v1beta2.Proto" - + "RowsB\006\n\004rows\"\243\001\n\022AppendRowsResponse\022\020\n\006o" - + "ffset\030\001 \001(\003H\000\022#\n\005error\030\002 \001(\0132\022.google.rp" - + "c.StatusH\000\022J\n\016updated_schema\030\003 \001(\01322.goo" - + "gle.cloud.bigquery.storage.v1beta2.Table" - + "SchemaB\n\n\010response\"Y\n\025GetWriteStreamRequ" - + "est\022@\n\004name\030\001 \001(\tB2\340A\002\372A,\n*bigquerystora" - + "ge.googleapis.com/WriteStream\"Q\n\036BatchCo" - + "mmitWriteStreamsRequest\022\023\n\006parent\030\001 \001(\tB" - + "\003\340A\002\022\032\n\rwrite_streams\030\002 \003(\tB\003\340A\002\"R\n\037Batc" - + "hCommitWriteStreamsResponse\022/\n\013commit_ti" - + "me\030\001 \001(\0132\032.google.protobuf.Timestamp\"^\n\032" - + "FinalizeWriteStreamRequest\022@\n\004name\030\001 \001(\t" - + "B2\340A\002\372A,\n*bigquerystorage.googleapis.com" - + "/WriteStream\"0\n\033FinalizeWriteStreamRespo" - + "nse\022\021\n\trow_count\030\001 \001(\003\"\211\001\n\020FlushRowsRequ" - + "est\022H\n\014write_stream\030\001 \001(\tB2\340A\002\372A,\n*bigqu" - + "erystorage.googleapis.com/WriteStream\022+\n" - + "\006offset\030\002 \001(\0132\033.google.protobuf.Int64Val" - + "ue\"#\n\021FlushRowsResponse\022\016\n\006offset\030\001 \001(\0032" - + "\363\006\n\014BigQueryRead\022\370\001\n\021CreateReadSession\022?" - + ".google.cloud.bigquery.storage.v1beta2.C" - + "reateReadSessionRequest\0322.google.cloud.b" - + "igquery.storage.v1beta2.ReadSession\"n\202\323\344" - + "\223\002A\".google.cloud.bigquery." 
- + "storage.v1beta2.SplitReadStreamResponse\"" - + "C\202\323\344\223\002=\022;/v1beta2/{name=projects/*/locat" - + "ions/*/sessions/*/streams/*}\032\256\001\312A\036bigque" - + "rystorage.googleapis.com\322A\211\001https://www." - + "googleapis.com/auth/bigquery,https://www" - + ".googleapis.com/auth/bigquery.readonly,h" - + "ttps://www.googleapis.com/auth/cloud-pla" - + "tform2\226\014\n\rBigQueryWrite\022\346\001\n\021CreateWriteS" - + "tream\022?.google.cloud.bigquery.storage.v1" - + "beta2.CreateWriteStreamRequest\0322.google." - + "cloud.bigquery.storage.v1beta2.WriteStre" - + "am\"\\\202\323\344\223\002@\"0/v1beta2/{parent=projects/*/" - + "datasets/*/tables/*}:\014write_stream\332A\023par" - + "ent,write_stream\022\341\001\n\nAppendRows\0228.google" - + ".cloud.bigquery.storage.v1beta2.AppendRo" - + "wsRequest\0329.google.cloud.bigquery.storag" - + "e.v1beta2.AppendRowsResponse\"Z\202\323\344\223\002E\"@/v" - + "1beta2/{write_stream=projects/*/datasets" - + "/*/tables/*/streams/*}:\001*\332A\014write_stream" - + "(\0010\001\022\316\001\n\016GetWriteStream\022<.google.cloud.b" - + "igquery.storage.v1beta2.GetWriteStreamRe" - + "quest\0322.google.cloud.bigquery.storage.v1" - + "beta2.WriteStream\"J\202\323\344\223\002=\"8/v1beta2/{nam" + + "sRequest.ProtoDataH\000\022\020\n\010trace_id\030\006 \001(\t\032\226" + + "\001\n\tProtoData\022I\n\rwriter_schema\030\001 \001(\01322.go" + + "ogle.cloud.bigquery.storage.v1beta2.Prot" + + "oSchema\022>\n\004rows\030\002 \001(\01320.google.cloud.big" + + "query.storage.v1beta2.ProtoRowsB\006\n\004rows\"" + + "\257\002\n\022AppendRowsResponse\022_\n\rappend_result\030" + + "\001 \001(\0132F.google.cloud.bigquery.storage.v1" + + "beta2.AppendRowsResponse.AppendResultH\000\022" + + "#\n\005error\030\002 \001(\0132\022.google.rpc.StatusH\000\022J\n\016" + + "updated_schema\030\003 \001(\01322.google.cloud.bigq" + + "uery.storage.v1beta2.TableSchema\032;\n\014Appe" + + "ndResult\022+\n\006offset\030\001 
\001(\0132\033.google.protob" + + "uf.Int64ValueB\n\n\010response\"Y\n\025GetWriteStr" + + "eamRequest\022@\n\004name\030\001 \001(\tB2\340A\002\372A,\n*bigque" + + "rystorage.googleapis.com/WriteStream\"Q\n\036" + + "BatchCommitWriteStreamsRequest\022\023\n\006parent" + + "\030\001 \001(\tB\003\340A\002\022\032\n\rwrite_streams\030\002 \003(\tB\003\340A\002\"" + + "\236\001\n\037BatchCommitWriteStreamsResponse\022/\n\013c" + + "ommit_time\030\001 \001(\0132\032.google.protobuf.Times" + + "tamp\022J\n\rstream_errors\030\002 \003(\01323.google.clo" + + "ud.bigquery.storage.v1beta2.StorageError" + + "\"^\n\032FinalizeWriteStreamRequest\022@\n\004name\030\001" + + " \001(\tB2\340A\002\372A,\n*bigquerystorage.googleapis" + + ".com/WriteStream\"0\n\033FinalizeWriteStreamR" + + "esponse\022\021\n\trow_count\030\001 \001(\003\"\211\001\n\020FlushRows" + + "Request\022H\n\014write_stream\030\001 \001(\tB2\340A\002\372A,\n*b" + + "igquerystorage.googleapis.com/WriteStrea" + + "m\022+\n\006offset\030\002 \001(\0132\033.google.protobuf.Int6" + + "4Value\"#\n\021FlushRowsResponse\022\016\n\006offset\030\001 " + + "\001(\003\"\276\002\n\014StorageError\022R\n\004code\030\001 \001(\0162D.goo" + + "gle.cloud.bigquery.storage.v1beta2.Stora" + + "geError.StorageErrorCode\022\016\n\006entity\030\002 \001(\t" + + "\022\025\n\rerror_message\030\003 \001(\t\"\262\001\n\020StorageError" + + "Code\022\"\n\036STORAGE_ERROR_CODE_UNSPECIFIED\020\000" + + "\022\023\n\017TABLE_NOT_FOUND\020\001\022\034\n\030STREAM_ALREADY_" + + "COMMITTED\020\002\022\024\n\020STREAM_NOT_FOUND\020\003\022\027\n\023INV" + + "ALID_STREAM_TYPE\020\004\022\030\n\024INVALID_STREAM_STA" + + "TE\020\0052\363\006\n\014BigQueryRead\022\370\001\n\021CreateReadSess" + + "ion\022?.google.cloud.bigquery.storage.v1be" + + "ta2.CreateReadSessionRequest\0322.google.cl" + + "oud.bigquery.storage.v1beta2.ReadSession" + + "\"n\202\323\344\223\002A\".google.cloud.bigq" + + "uery.storage.v1beta2.SplitReadStreamResp" + + 
"onse\"C\202\323\344\223\002=\022;/v1beta2/{name=projects/*/" + + "locations/*/sessions/*/streams/*}\032\256\001\312A\036b" + + "igquerystorage.googleapis.com\322A\211\001https:/" + + "/www.googleapis.com/auth/bigquery,https:" + + "//www.googleapis.com/auth/bigquery.reado" + + "nly,https://www.googleapis.com/auth/clou" + + "d-platform2\226\014\n\rBigQueryWrite\022\346\001\n\021CreateW" + + "riteStream\022?.google.cloud.bigquery.stora" + + "ge.v1beta2.CreateWriteStreamRequest\0322.go" + + "ogle.cloud.bigquery.storage.v1beta2.Writ" + + "eStream\"\\\202\323\344\223\002@\"0/v1beta2/{parent=projec" + + "ts/*/datasets/*/tables/*}:\014write_stream\332" + + "A\023parent,write_stream\022\341\001\n\nAppendRows\0228.g" + + "oogle.cloud.bigquery.storage.v1beta2.App" + + "endRowsRequest\0329.google.cloud.bigquery.s" + + "torage.v1beta2.AppendRowsResponse\"Z\202\323\344\223\002" + + "E\"@/v1beta2/{write_stream=projects/*/dat" + + "asets/*/tables/*/streams/*}:\001*\332A\014write_s" + + "tream(\0010\001\022\316\001\n\016GetWriteStream\022<.google.cl" + + "oud.bigquery.storage.v1beta2.GetWriteStr" + + "eamRequest\0322.google.cloud.bigquery.stora" + + "ge.v1beta2.WriteStream\"J\202\323\344\223\002=\"8/v1beta2" + + "/{name=projects/*/datasets/*/tables/*/st" + + "reams/*}:\001*\332A\004name\022\350\001\n\023FinalizeWriteStre" + + "am\022A.google.cloud.bigquery.storage.v1bet" + + "a2.FinalizeWriteStreamRequest\032B.google.c" + + "loud.bigquery.storage.v1beta2.FinalizeWr" + + "iteStreamResponse\"J\202\323\344\223\002=\"8/v1beta2/{nam" + "e=projects/*/datasets/*/tables/*/streams" - + "/*}:\001*\332A\004name\022\350\001\n\023FinalizeWriteStream\022A." - + "google.cloud.bigquery.storage.v1beta2.Fi" - + "nalizeWriteStreamRequest\032B.google.cloud." 
- + "bigquery.storage.v1beta2.FinalizeWriteSt" - + "reamResponse\"J\202\323\344\223\002=\"8/v1beta2/{name=pro" - + "jects/*/datasets/*/tables/*/streams/*}:\001" - + "*\332A\004name\022\353\001\n\027BatchCommitWriteStreams\022E.g" - + "oogle.cloud.bigquery.storage.v1beta2.Bat" - + "chCommitWriteStreamsRequest\032F.google.clo" - + "ud.bigquery.storage.v1beta2.BatchCommitW" - + "riteStreamsResponse\"A\202\323\344\223\0022\0220/v1beta2/{p" - + "arent=projects/*/datasets/*/tables/*}\332A\006" - + "parent\022\332\001\n\tFlushRows\0227.google.cloud.bigq" - + "uery.storage.v1beta2.FlushRowsRequest\0328." - + "google.cloud.bigquery.storage.v1beta2.Fl" - + "ushRowsResponse\"Z\202\323\344\223\002E\"@/v1beta2/{write" - + "_stream=projects/*/datasets/*/tables/*/s" - + "treams/*}:\001*\332A\014write_stream\032\260\001\312A\036bigquer" - + "ystorage.googleapis.com\322A\213\001https://www.g" - + "oogleapis.com/auth/bigquery,https://www." - + "googleapis.com/auth/bigquery.insertdata," - + "https://www.googleapis.com/auth/cloud-pl" - + "atformB\211\001\n)com.google.cloud.bigquery.sto" - + "rage.v1beta2B\014StorageProtoP\001ZLgoogle.gol" - + "ang.org/genproto/googleapis/cloud/bigque" - + "ry/storage/v1beta2;storageb\006proto3" + + "/*}:\001*\332A\004name\022\353\001\n\027BatchCommitWriteStream" + + "s\022E.google.cloud.bigquery.storage.v1beta" + + "2.BatchCommitWriteStreamsRequest\032F.googl" + + "e.cloud.bigquery.storage.v1beta2.BatchCo" + + "mmitWriteStreamsResponse\"A\202\323\344\223\0022\0220/v1bet" + + "a2/{parent=projects/*/datasets/*/tables/" + + "*}\332A\006parent\022\332\001\n\tFlushRows\0227.google.cloud" + + ".bigquery.storage.v1beta2.FlushRowsReque" + + "st\0328.google.cloud.bigquery.storage.v1bet" + + "a2.FlushRowsResponse\"Z\202\323\344\223\002E\"@/v1beta2/{" + + "write_stream=projects/*/datasets/*/table" + + "s/*/streams/*}:\001*\332A\014write_stream\032\260\001\312A\036bi" + + "gquerystorage.googleapis.com\322A\213\001https://" + + 
"www.googleapis.com/auth/bigquery,https:/" + + "/www.googleapis.com/auth/bigquery.insert" + + "data,https://www.googleapis.com/auth/clo" + + "ud-platformB\211\001\n)com.google.cloud.bigquer" + + "y.storage.v1beta2B\014StorageProtoP\001ZLgoogl" + + "e.golang.org/genproto/googleapis/cloud/b" + + "igquery/storage/v1beta2;storageb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -351,7 +372,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsRequest_descriptor, new java.lang.String[] { - "WriteStream", "Offset", "ProtoRows", "IgnoreUnknownFields", "Rows", + "WriteStream", "Offset", "ProtoRows", "TraceId", "Rows", }); internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsRequest_ProtoData_descriptor = internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsRequest_descriptor @@ -369,7 +390,17 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_descriptor, new java.lang.String[] { - "Offset", "Error", "UpdatedSchema", "Response", + "AppendResult", "Error", "UpdatedSchema", "Response", + }); + internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_descriptor = + internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_descriptor + .getNestedTypes() + .get(0); + internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_storage_v1beta2_AppendRowsResponse_AppendResult_descriptor, + new java.lang.String[] { + "Offset", }); 
internal_static_google_cloud_bigquery_storage_v1beta2_GetWriteStreamRequest_descriptor = getDescriptor().getMessageTypes().get(10); @@ -393,7 +424,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_bigquery_storage_v1beta2_BatchCommitWriteStreamsResponse_descriptor, new java.lang.String[] { - "CommitTime", + "CommitTime", "StreamErrors", }); internal_static_google_cloud_bigquery_storage_v1beta2_FinalizeWriteStreamRequest_descriptor = getDescriptor().getMessageTypes().get(13); @@ -427,6 +458,14 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new java.lang.String[] { "Offset", }); + internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_fieldAccessorTable = + new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_google_cloud_bigquery_storage_v1beta2_StorageError_descriptor, + new java.lang.String[] { + "Code", "Entity", "ErrorMessage", + }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.ClientProto.defaultHost); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StreamProto.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StreamProto.java index 4e65b64e92..c4a2531dcf 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StreamProto.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StreamProto.java @@ -93,7 +93,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "ream\022\021\n\004name\030\001 
\001(\tB\003\340A\003:{\352Ax\n)bigqueryst" + "orage.googleapis.com/ReadStream\022Kproject" + "s/{project}/locations/{location}/session" - + "s/{session}/streams/{stream}\"\374\003\n\013WriteSt" + + "s/{session}/streams/{stream}\"\347\003\n\013WriteSt" + "ream\022\021\n\004name\030\001 \001(\tB\003\340A\003\022J\n\004type\030\002 \001(\01627." + "google.cloud.bigquery.storage.v1beta2.Wr" + "iteStream.TypeB\003\340A\005\0224\n\013create_time\030\003 \001(\013" @@ -101,19 +101,19 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "mit_time\030\004 \001(\0132\032.google.protobuf.Timesta" + "mpB\003\340A\003\022M\n\014table_schema\030\005 \001(\01322.google.c" + "loud.bigquery.storage.v1beta2.TableSchem" - + "aB\003\340A\003\022\023\n\013external_id\030\006 \001(\t\"F\n\004Type\022\024\n\020T" - + "YPE_UNSPECIFIED\020\000\022\r\n\tCOMMITTED\020\001\022\013\n\007PEND" - + "ING\020\002\022\014\n\010BUFFERED\020\003:v\352As\n*bigquerystorag" - + "e.googleapis.com/WriteStream\022Eprojects/{" - + "project}/datasets/{dataset}/tables/{tabl" - + "e}/streams/{stream}*>\n\nDataFormat\022\033\n\027DAT" - + "A_FORMAT_UNSPECIFIED\020\000\022\010\n\004AVRO\020\001\022\t\n\005ARRO" - + "W\020\002B\340\001\n)com.google.cloud.bigquery.storag" - + "e.v1beta2B\013StreamProtoP\001ZLgoogle.golang." 
- + "org/genproto/googleapis/cloud/bigquery/s" - + "torage/v1beta2;storage\352AU\n\035bigquery.goog" - + "leapis.com/Table\0224projects/{project}/dat" - + "asets/{dataset}/tables/{table}b\006proto3" + + "aB\003\340A\003\"F\n\004Type\022\024\n\020TYPE_UNSPECIFIED\020\000\022\r\n\t" + + "COMMITTED\020\001\022\013\n\007PENDING\020\002\022\014\n\010BUFFERED\020\003:v" + + "\352As\n*bigquerystorage.googleapis.com/Writ" + + "eStream\022Eprojects/{project}/datasets/{da" + + "taset}/tables/{table}/streams/{stream}*>" + + "\n\nDataFormat\022\033\n\027DATA_FORMAT_UNSPECIFIED\020" + + "\000\022\010\n\004AVRO\020\001\022\t\n\005ARROW\020\002B\340\001\n)com.google.cl" + + "oud.bigquery.storage.v1beta2B\013StreamProt" + + "oP\001ZLgoogle.golang.org/genproto/googleap" + + "is/cloud/bigquery/storage/v1beta2;storag" + + "e\352AU\n\035bigquery.googleapis.com/Table\0224pro" + + "jects/{project}/datasets/{dataset}/table" + + "s/{table}b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( @@ -177,7 +177,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_bigquery_storage_v1beta2_WriteStream_descriptor, new java.lang.String[] { - "Name", "Type", "CreateTime", "CommitTime", "TableSchema", "ExternalId", + "Name", "Type", "CreateTime", "CommitTime", "TableSchema", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStream.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStream.java index 1c5fa0448c..a06a4952ee 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStream.java +++ 
b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStream.java @@ -40,7 +40,6 @@ private WriteStream(com.google.protobuf.GeneratedMessageV3.Builder builder) { private WriteStream() { name_ = ""; type_ = 0; - externalId_ = ""; } @java.lang.Override @@ -131,13 +130,6 @@ private WriteStream( tableSchema_ = subBuilder.buildPartial(); } - break; - } - case 50: - { - java.lang.String s = input.readStringRequireUtf8(); - - externalId_ = s; break; } default: @@ -623,55 +615,6 @@ public com.google.cloud.bigquery.storage.v1beta2.TableSchemaOrBuilder getTableSc return getTableSchema(); } - public static final int EXTERNAL_ID_FIELD_NUMBER = 6; - private volatile java.lang.Object externalId_; - /** - * - * - *
-   * Id set by client to annotate its identity.
-   * 
- * - * string external_id = 6; - * - * @return The externalId. - */ - @java.lang.Override - public java.lang.String getExternalId() { - java.lang.Object ref = externalId_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - externalId_ = s; - return s; - } - } - /** - * - * - *
-   * Id set by client to annotate its identity.
-   * 
- * - * string external_id = 6; - * - * @return The bytes for externalId. - */ - @java.lang.Override - public com.google.protobuf.ByteString getExternalIdBytes() { - java.lang.Object ref = externalId_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); - externalId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - private byte memoizedIsInitialized = -1; @java.lang.Override @@ -703,9 +646,6 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (tableSchema_ != null) { output.writeMessage(5, getTableSchema()); } - if (!getExternalIdBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 6, externalId_); - } unknownFields.writeTo(output); } @@ -732,9 +672,6 @@ public int getSerializedSize() { if (tableSchema_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getTableSchema()); } - if (!getExternalIdBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, externalId_); - } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -765,7 +702,6 @@ public boolean equals(final java.lang.Object obj) { if (hasTableSchema()) { if (!getTableSchema().equals(other.getTableSchema())) return false; } - if (!getExternalId().equals(other.getExternalId())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -793,8 +729,6 @@ public int hashCode() { hash = (37 * hash) + TABLE_SCHEMA_FIELD_NUMBER; hash = (53 * hash) + getTableSchema().hashCode(); } - hash = (37 * hash) + EXTERNAL_ID_FIELD_NUMBER; - hash = (53 * hash) + getExternalId().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -963,8 +897,6 @@ public Builder clear() { tableSchema_ = null; tableSchemaBuilder_ = null; } - externalId_ = ""; - return this; } @@ -1009,7 
+941,6 @@ public com.google.cloud.bigquery.storage.v1beta2.WriteStream buildPartial() { } else { result.tableSchema_ = tableSchemaBuilder_.build(); } - result.externalId_ = externalId_; onBuilt(); return result; } @@ -1076,10 +1007,6 @@ public Builder mergeFrom(com.google.cloud.bigquery.storage.v1beta2.WriteStream o if (other.hasTableSchema()) { mergeTableSchema(other.getTableSchema()); } - if (!other.getExternalId().isEmpty()) { - externalId_ = other.externalId_; - onChanged(); - } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1994,112 +1921,6 @@ public com.google.cloud.bigquery.storage.v1beta2.TableSchema.Builder getTableSch return tableSchemaBuilder_; } - private java.lang.Object externalId_ = ""; - /** - * - * - *
-     * Id set by client to annotate its identity.
-     * 
- * - * string external_id = 6; - * - * @return The externalId. - */ - public java.lang.String getExternalId() { - java.lang.Object ref = externalId_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - externalId_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * - * - *
-     * Id set by client to annotate its identity.
-     * 
- * - * string external_id = 6; - * - * @return The bytes for externalId. - */ - public com.google.protobuf.ByteString getExternalIdBytes() { - java.lang.Object ref = externalId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); - externalId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * - * - *
-     * Id set by client to annotate its identity.
-     * 
- * - * string external_id = 6; - * - * @param value The externalId to set. - * @return This builder for chaining. - */ - public Builder setExternalId(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - externalId_ = value; - onChanged(); - return this; - } - /** - * - * - *
-     * Id set by client to annotate its identity.
-     * 
- * - * string external_id = 6; - * - * @return This builder for chaining. - */ - public Builder clearExternalId() { - - externalId_ = getDefaultInstance().getExternalId(); - onChanged(); - return this; - } - /** - * - * - *
-     * Id set by client to annotate its identity.
-     * 
- * - * string external_id = 6; - * - * @param value The bytes for externalId to set. - * @return This builder for chaining. - */ - public Builder setExternalIdBytes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - externalId_ = value; - onChanged(); - return this; - } - @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamOrBuilder.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamOrBuilder.java index df1ed42eb3..ec38e1c726 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamOrBuilder.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamOrBuilder.java @@ -216,29 +216,4 @@ public interface WriteStreamOrBuilder * */ com.google.cloud.bigquery.storage.v1beta2.TableSchemaOrBuilder getTableSchemaOrBuilder(); - - /** - * - * - *
-   * Id set by client to annotate its identity.
-   * 
- * - * string external_id = 6; - * - * @return The externalId. - */ - java.lang.String getExternalId(); - /** - * - * - *
-   * Id set by client to annotate its identity.
-   * 
- * - * string external_id = 6; - * - * @return The bytes for externalId. - */ - com.google.protobuf.ByteString getExternalIdBytes(); } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/proto/google/cloud/bigquery/storage/v1beta2/storage.proto b/proto-google-cloud-bigquerystorage-v1beta2/src/main/proto/google/cloud/bigquery/storage/v1beta2/storage.proto index d1573bef31..5538e29f28 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/proto/google/cloud/bigquery/storage/v1beta2/storage.proto +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/proto/google/cloud/bigquery/storage/v1beta2/storage.proto @@ -387,20 +387,37 @@ message AppendRowsRequest { ProtoData proto_rows = 4; } - // Only initial request setting is respected. If true, drop unknown input - // fields. Otherwise, the extra fields will cause append to fail. Default - // value is false. - bool ignore_unknown_fields = 5; + // Id set by client to annotate its identity. Only initial request setting is + // respected. + string trace_id = 6; } // Response message for `AppendRows`. message AppendRowsResponse { - oneof response { - // The row offset at which the last append occurred. - int64 offset = 1; + // A success append result. + message AppendResult { + // The row offset at which the last append occurred. The offset will not be + // set if appending using default streams. + google.protobuf.Int64Value offset = 1; + } - // Error in case of append failure. If set, it means rows are not accepted - // into the system. Users can retry within the same connection. + oneof response { + // Result if the append is successful. + AppendResult append_result = 1; + + // Error in case of request failed. If set, it means rows are not accepted + // into the system. Users can retry or continue with other requests within + // the same connection. + // ALREADY_EXISTS: happens when offset is specified, it means the entire + // request is already appended, it is safe to ignore this error. 
+ // OUT_OF_RANGE: happens when offset is specified, it means the specified + // offset is beyond the end of the stream. + // INVALID_ARGUMENT: error caused by malformed request or data. + // RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when + // append without offset. + // ABORTED: request processing is aborted because of prior failures, request + // can be retried if previous failure is fixed. + // INTERNAL: server side errors that can be retried. google.rpc.Status error = 2; } @@ -435,7 +452,12 @@ message BatchCommitWriteStreamsRequest { // Response message for `BatchCommitWriteStreams`. message BatchCommitWriteStreamsResponse { // The time at which streams were committed in microseconds granularity. + // This field will only exist when there is no stream errors. google.protobuf.Timestamp commit_time = 1; + + // Stream level error if commit failed. Only streams with error will be in + // the list. + repeated StorageError stream_errors = 2; } // Request message for invoking `FinalizeWriteStream`. @@ -476,3 +498,41 @@ message FlushRowsResponse { // The rows before this offset (including this offset) are flushed. int64 offset = 1; } + +// Structured custom BigQuery Storage error message. The error can be attached +// as error details in the returned rpc Status. User can use the info to process +// errors in a structural way, rather than having to parse error messages. +message StorageError { + // Error code for `StorageError`. + enum StorageErrorCode { + // Default error. + STORAGE_ERROR_CODE_UNSPECIFIED = 0; + + // Table is not found in the system. + TABLE_NOT_FOUND = 1; + + // Stream is already committed. + STREAM_ALREADY_COMMITTED = 2; + + // Stream is not found. + STREAM_NOT_FOUND = 3; + + // Invalid Stream type. + // For example, you try to commit a stream that is not pending. + INVALID_STREAM_TYPE = 4; + + // Invalid Stream state. + // For example, you try to commit a stream that is not fianlized or is + // garbaged. 
+ INVALID_STREAM_STATE = 5; + } + + // BigQuery Storage specific error code. + StorageErrorCode code = 1; + + // Name of the failed entity. + string entity = 2; + + // Message that describes the error. + string error_message = 3; +} diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/proto/google/cloud/bigquery/storage/v1beta2/stream.proto b/proto-google-cloud-bigquerystorage-v1beta2/src/main/proto/google/cloud/bigquery/storage/v1beta2/stream.proto index 1c162d9d76..2b0a58c95a 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/proto/google/cloud/bigquery/storage/v1beta2/stream.proto +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/proto/google/cloud/bigquery/storage/v1beta2/stream.proto @@ -186,7 +186,4 @@ message WriteStream { // compatible with this schema to send in initial `AppendRowsRequest`. // The table schema could go out of date during the life time of the stream. TableSchema table_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Id set by client to annotate its identity. 
- string external_id = 6; } diff --git a/synth.metadata b/synth.metadata index f131b56899..27d26fe9d5 100644 --- a/synth.metadata +++ b/synth.metadata @@ -18,32 +18,32 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e39e42f368d236203a774ee994fcb4d730c33a83", - "internalRef": "345311069" + "sha": "b53c4d98aab1eae3dac90b37019dede686782f13", + "internalRef": "345469340" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e39e42f368d236203a774ee994fcb4d730c33a83", - "internalRef": "345311069" + "sha": "b53c4d98aab1eae3dac90b37019dede686782f13", + "internalRef": "345469340" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e39e42f368d236203a774ee994fcb4d730c33a83", - "internalRef": "345311069" + "sha": "b53c4d98aab1eae3dac90b37019dede686782f13", + "internalRef": "345469340" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e39e42f368d236203a774ee994fcb4d730c33a83", - "internalRef": "345311069" + "sha": "b53c4d98aab1eae3dac90b37019dede686782f13", + "internalRef": "345469340" } }, { @@ -327,6 +327,8 @@ "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/SplitReadStreamRequestOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/SplitReadStreamResponse.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/SplitReadStreamResponseOrBuilder.java", + "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageError.java", + "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageErrorOrBuilder.java", 
"proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StorageProto.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StreamProto.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/StreamStats.java", From 4b537f43ff6c4c19e9817fafdecb1a6904d457b9 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 11 Dec 2020 16:28:26 -0800 Subject: [PATCH 3/6] fix: Update gapic-generator-java to 0.0.7 Committer: @miraleung PiperOrigin-RevId: 345476969 Source-Author: Google APIs Source-Date: Thu Dec 3 10:07:32 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: 7be2c821dd88109038c55c89f7dd48f092eeab9d Source-Link: https://github.com/googleapis/googleapis/commit/7be2c821dd88109038c55c89f7dd48f092eeab9d --- .../storage/v1/BaseBigQueryReadClient.java | 3 +-- .../storage/v1alpha2/BigQueryWriteClient.java | 11 +++++------ .../v1beta1/BaseBigQueryStorageClient.java | 3 +-- .../storage/v1beta2/BaseBigQueryReadClient.java | 3 +-- .../storage/v1beta2/BigQueryWriteClient.java | 11 ++++------- .../cloud/bigquery/storage/v1/ProjectName.java | 8 ++++---- .../bigquery/storage/v1/ReadStreamName.java | 14 +++++++------- .../bigquery/storage/v1alpha2/TableName.java | 12 ++++++------ .../storage/v1alpha2/WriteStreamName.java | 14 +++++++------- .../bigquery/storage/v1beta1/ProjectName.java | 8 ++++---- .../bigquery/storage/v1beta2/ProjectName.java | 8 ++++---- .../bigquery/storage/v1beta2/ReadStreamName.java | 14 +++++++------- .../bigquery/storage/v1beta2/TableName.java | 12 ++++++------ .../storage/v1beta2/WriteStreamName.java | 14 +++++++------- synth.metadata | 16 ++++++++-------- 15 files changed, 72 insertions(+), 79 deletions(-) diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java 
b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java index de3f88a6d8..cce27f0e15 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java @@ -23,7 +23,6 @@ import com.google.cloud.bigquery.storage.v1.stub.BigQueryReadStub; import com.google.cloud.bigquery.storage.v1.stub.BigQueryReadStubSettings; import java.io.IOException; -import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; @@ -173,7 +172,7 @@ public final ReadSession createReadSession( ProjectName parent, ReadSession readSession, int maxStreamCount) { CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder() - .setParent(Objects.isNull(parent) ? null : parent.toString()) + .setParent(parent == null ? null : parent.toString()) .setReadSession(readSession) .setMaxStreamCount(maxStreamCount) .build(); diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java index 5f23a07a74..6f23b528ca 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java @@ -23,7 +23,6 @@ import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStub; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStubSettings; import java.io.IOException; -import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; @@ -149,7 +148,7 @@ public final Stream.WriteStream createWriteStream( TableName parent, Stream.WriteStream writeStream) { 
Storage.CreateWriteStreamRequest request = Storage.CreateWriteStreamRequest.newBuilder() - .setParent(Objects.isNull(parent) ? null : parent.toString()) + .setParent(parent == null ? null : parent.toString()) .setWriteStream(writeStream) .build(); return createWriteStream(request); @@ -232,7 +231,7 @@ public final Stream.WriteStream createWriteStream(Storage.CreateWriteStreamReque public final Stream.WriteStream getWriteStream(WriteStreamName name) { Storage.GetWriteStreamRequest request = Storage.GetWriteStreamRequest.newBuilder() - .setName(Objects.isNull(name) ? null : name.toString()) + .setName(name == null ? null : name.toString()) .build(); return getWriteStream(request); } @@ -284,7 +283,7 @@ public final Stream.WriteStream getWriteStream(Storage.GetWriteStreamRequest req public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { Storage.FinalizeWriteStreamRequest request = Storage.FinalizeWriteStreamRequest.newBuilder() - .setName(Objects.isNull(name) ? null : name.toString()) + .setName(name == null ? null : name.toString()) .build(); return finalizeWriteStream(request); } @@ -340,7 +339,7 @@ public final Storage.FinalizeWriteStreamResponse finalizeWriteStream( public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(TableName parent) { Storage.BatchCommitWriteStreamsRequest request = Storage.BatchCommitWriteStreamsRequest.newBuilder() - .setParent(Objects.isNull(parent) ? null : parent.toString()) + .setParent(parent == null ? null : parent.toString()) .build(); return batchCommitWriteStreams(request); } @@ -402,7 +401,7 @@ public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams( public final Storage.FlushRowsResponse flushRows(WriteStreamName writeStream) { Storage.FlushRowsRequest request = Storage.FlushRowsRequest.newBuilder() - .setWriteStream(Objects.isNull(writeStream) ? null : writeStream.toString()) + .setWriteStream(writeStream == null ? 
null : writeStream.toString()) .build(); return flushRows(request); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java index 5528718f0d..acafbdcbf0 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java @@ -24,7 +24,6 @@ import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStubSettings; import com.google.protobuf.Empty; import java.io.IOException; -import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; @@ -168,7 +167,7 @@ public final Storage.ReadSession createReadSession( Storage.CreateReadSessionRequest request = Storage.CreateReadSessionRequest.newBuilder() .setTableReference(tableReference) - .setParent(Objects.isNull(parent) ? null : parent.toString()) + .setParent(parent == null ? 
null : parent.toString()) .setRequestedStreams(requestedStreams) .build(); return createReadSession(request); diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java index 73f4c14477..fc42c63e5f 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java @@ -23,7 +23,6 @@ import com.google.cloud.bigquery.storage.v1beta2.stub.BigQueryReadStub; import com.google.cloud.bigquery.storage.v1beta2.stub.BigQueryReadStubSettings; import java.io.IOException; -import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; @@ -176,7 +175,7 @@ public final ReadSession createReadSession( ProjectName parent, ReadSession readSession, int maxStreamCount) { CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder() - .setParent(Objects.isNull(parent) ? null : parent.toString()) + .setParent(parent == null ? 
null : parent.toString()) .setReadSession(readSession) .setMaxStreamCount(maxStreamCount) .build(); diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java index 32515d9178..a7c464f995 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java @@ -23,7 +23,6 @@ import com.google.cloud.bigquery.storage.v1beta2.stub.BigQueryWriteStub; import com.google.cloud.bigquery.storage.v1beta2.stub.BigQueryWriteStubSettings; import java.io.IOException; -import java.util.Objects; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; @@ -152,7 +151,7 @@ public BigQueryWriteStub getStub() { public final WriteStream createWriteStream(TableName parent, WriteStream writeStream) { CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder() - .setParent(Objects.isNull(parent) ? null : parent.toString()) + .setParent(parent == null ? null : parent.toString()) .setWriteStream(writeStream) .build(); return createWriteStream(request); @@ -241,9 +240,7 @@ public final BidiStreamingCallable append */ public final WriteStream getWriteStream(WriteStreamName name) { GetWriteStreamRequest request = - GetWriteStreamRequest.newBuilder() - .setName(Objects.isNull(name) ? null : name.toString()) - .build(); + GetWriteStreamRequest.newBuilder().setName(name == null ? null : name.toString()).build(); return getWriteStream(request); } @@ -293,7 +290,7 @@ public final UnaryCallable getWriteStreamCal public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder() - .setName(Objects.isNull(name) ? 
null : name.toString()) + .setName(name == null ? null : name.toString()) .build(); return finalizeWriteStream(request); } @@ -393,7 +390,7 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams( public final FlushRowsResponse flushRows(WriteStreamName writeStream) { FlushRowsRequest request = FlushRowsRequest.newBuilder() - .setWriteStream(Objects.isNull(writeStream) ? null : writeStream.toString()) + .setWriteStream(writeStream == null ? null : writeStream.toString()) .build(); return flushRows(request); } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java index f87488cdd7..e138c838d1 100644 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java @@ -84,7 +84,7 @@ public static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (ProjectName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -99,11 +99,11 @@ public static boolean isParsableFrom(String formattedString) { @Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if (project != null) { fieldMapBuilder.put("project", project); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java 
b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java index 40301ee1f2..8c68ce74b7 100644 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java @@ -121,7 +121,7 @@ public static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (ReadStreamName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -136,20 +136,20 @@ public static boolean isParsableFrom(String formattedString) { @Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if (project != null) { fieldMapBuilder.put("project", project); } - if (!Objects.isNull(location)) { + if (location != null) { fieldMapBuilder.put("location", location); } - if (!Objects.isNull(session)) { + if (session != null) { fieldMapBuilder.put("session", session); } - if (!Objects.isNull(stream)) { + if (stream != null) { fieldMapBuilder.put("stream", stream); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java index 2041be7e62..2a1d43d92b 100644 --- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java +++ 
b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java @@ -98,7 +98,7 @@ public static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (TableName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -113,17 +113,17 @@ public static boolean isParsableFrom(String formattedString) { @Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if (project != null) { fieldMapBuilder.put("project", project); } - if (!Objects.isNull(dataset)) { + if (dataset != null) { fieldMapBuilder.put("dataset", dataset); } - if (!Objects.isNull(table)) { + if (table != null) { fieldMapBuilder.put("table", table); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java index 2ef24c19fa..7b2430f06a 100644 --- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java @@ -121,7 +121,7 @@ public static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (WriteStreamName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -136,20 +136,20 @@ 
public static boolean isParsableFrom(String formattedString) { @Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if (project != null) { fieldMapBuilder.put("project", project); } - if (!Objects.isNull(dataset)) { + if (dataset != null) { fieldMapBuilder.put("dataset", dataset); } - if (!Objects.isNull(table)) { + if (table != null) { fieldMapBuilder.put("table", table); } - if (!Objects.isNull(stream)) { + if (stream != null) { fieldMapBuilder.put("stream", stream); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java index 4f0ef431f0..61336b2620 100644 --- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java @@ -84,7 +84,7 @@ public static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (ProjectName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -99,11 +99,11 @@ public static boolean isParsableFrom(String formattedString) { @Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if 
(project != null) { fieldMapBuilder.put("project", project); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java index 60fd9bf737..e6400237ee 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java @@ -84,7 +84,7 @@ public static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (ProjectName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -99,11 +99,11 @@ public static boolean isParsableFrom(String formattedString) { @Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if (project != null) { fieldMapBuilder.put("project", project); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java index 0533d6d397..eab784f8ac 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java @@ -121,7 +121,7 @@ public 
static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (ReadStreamName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -136,20 +136,20 @@ public static boolean isParsableFrom(String formattedString) { @Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if (project != null) { fieldMapBuilder.put("project", project); } - if (!Objects.isNull(location)) { + if (location != null) { fieldMapBuilder.put("location", location); } - if (!Objects.isNull(session)) { + if (session != null) { fieldMapBuilder.put("session", session); } - if (!Objects.isNull(stream)) { + if (stream != null) { fieldMapBuilder.put("stream", stream); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java index 2d4287f1d1..a90e8b3d16 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java @@ -98,7 +98,7 @@ public static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (TableName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -113,17 +113,17 @@ public static boolean isParsableFrom(String formattedString) { 
@Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if (project != null) { fieldMapBuilder.put("project", project); } - if (!Objects.isNull(dataset)) { + if (dataset != null) { fieldMapBuilder.put("dataset", dataset); } - if (!Objects.isNull(table)) { + if (table != null) { fieldMapBuilder.put("table", table); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java index d5325dadca..b3392aeadc 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java @@ -121,7 +121,7 @@ public static List parseList(List formattedStrings) { public static List toStringList(List values) { List list = new ArrayList<>(values.size()); for (WriteStreamName value : values) { - if (Objects.isNull(value)) { + if (value == null) { list.add(""); } else { list.add(value.toString()); @@ -136,20 +136,20 @@ public static boolean isParsableFrom(String formattedString) { @Override public Map getFieldValuesMap() { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { synchronized (this) { - if (Objects.isNull(fieldValuesMap)) { + if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (!Objects.isNull(project)) { + if (project != null) { fieldMapBuilder.put("project", project); } - if (!Objects.isNull(dataset)) { + if (dataset != null) { 
fieldMapBuilder.put("dataset", dataset); } - if (!Objects.isNull(table)) { + if (table != null) { fieldMapBuilder.put("table", table); } - if (!Objects.isNull(stream)) { + if (stream != null) { fieldMapBuilder.put("stream", stream); } fieldValuesMap = fieldMapBuilder.build(); diff --git a/synth.metadata b/synth.metadata index 27d26fe9d5..090443985d 100644 --- a/synth.metadata +++ b/synth.metadata @@ -18,32 +18,32 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b53c4d98aab1eae3dac90b37019dede686782f13", - "internalRef": "345469340" + "sha": "7be2c821dd88109038c55c89f7dd48f092eeab9d", + "internalRef": "345476969" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b53c4d98aab1eae3dac90b37019dede686782f13", - "internalRef": "345469340" + "sha": "7be2c821dd88109038c55c89f7dd48f092eeab9d", + "internalRef": "345476969" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b53c4d98aab1eae3dac90b37019dede686782f13", - "internalRef": "345469340" + "sha": "7be2c821dd88109038c55c89f7dd48f092eeab9d", + "internalRef": "345476969" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b53c4d98aab1eae3dac90b37019dede686782f13", - "internalRef": "345469340" + "sha": "7be2c821dd88109038c55c89f7dd48f092eeab9d", + "internalRef": "345476969" } }, { From 66fb6153ed6c688d21251484e50069b9dfa4811a Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 11 Dec 2020 16:34:00 -0800 Subject: [PATCH 4/6] chore: rollback migrating java-bigquerystorage to the Java microgenerator Committer: @miraleung PiperOrigin-RevId: 345522380 Source-Author: Google APIs Source-Date: Thu Dec 3 13:28:07 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: f8f975c7d43904e90d6c5f1684fdb6804400e641 Source-Link: https://github.com/googleapis/googleapis/commit/f8f975c7d43904e90d6c5f1684fdb6804400e641 
--- .../storage/v1/BaseBigQueryReadClient.java | 164 +++++-- .../storage/v1/BaseBigQueryReadSettings.java | 33 +- .../bigquery/storage/v1/package-info.java | 23 +- .../storage/v1/stub/BigQueryReadStub.java | 9 +- .../v1/stub/BigQueryReadStubSettings.java | 99 ++-- .../stub/GrpcBigQueryReadCallableFactory.java | 40 +- .../storage/v1/stub/GrpcBigQueryReadStub.java | 37 +- .../storage/v1alpha2/BigQueryWriteClient.java | 435 ++++++++++++++---- .../v1alpha2/BigQueryWriteSettings.java | 79 ++-- .../storage/v1alpha2/package-info.java | 20 +- .../v1alpha2/stub/BigQueryWriteStub.java | 37 +- .../stub/BigQueryWriteStubSettings.java | 189 ++++---- .../GrpcBigQueryWriteCallableFactory.java | 40 +- .../v1alpha2/stub/GrpcBigQueryWriteStub.java | 264 +++++------ .../v1beta1/BaseBigQueryStorageClient.java | 316 ++++++++++--- .../v1beta1/BaseBigQueryStorageSettings.java | 71 ++- .../storage/v1beta1/package-info.java | 23 +- .../v1beta1/stub/BigQueryStorageStub.java | 34 +- .../stub/BigQueryStorageStubSettings.java | 184 ++++---- .../GrpcBigQueryStorageCallableFactory.java | 40 +- .../v1beta1/stub/GrpcBigQueryStorageStub.java | 194 ++++---- .../v1beta2/BaseBigQueryReadClient.java | 164 +++++-- .../v1beta2/BaseBigQueryReadSettings.java | 33 +- .../storage/v1beta2/BigQueryWriteClient.java | 318 +++++++++++-- .../v1beta2/BigQueryWriteSettings.java | 36 +- .../storage/v1beta2/package-info.java | 35 +- .../v1beta2/stub/BigQueryReadStub.java | 9 +- .../stub/BigQueryReadStubSettings.java | 148 ++++-- .../v1beta2/stub/BigQueryWriteStub.java | 9 +- .../stub/BigQueryWriteStubSettings.java | 159 ++++--- .../stub/GrpcBigQueryReadCallableFactory.java | 40 +- .../v1beta2/stub/GrpcBigQueryReadStub.java | 37 +- .../GrpcBigQueryWriteCallableFactory.java | 40 +- .../v1beta2/stub/GrpcBigQueryWriteStub.java | 49 +- .../v1/BaseBigQueryReadClientTest.java | 152 ++---- .../bigquery/storage/v1/MockBigQueryRead.java | 6 +- .../storage/v1/MockBigQueryReadImpl.java | 18 +- 
.../v1alpha2/BigQueryWriteClientTest.java | 360 ++++----------- .../storage/v1alpha2/MockBigQueryWrite.java | 6 +- .../v1alpha2/MockBigQueryWriteImpl.java | 79 ++-- .../BaseBigQueryStorageClientTest.java | 189 ++++---- .../storage/v1beta1/MockBigQueryStorage.java | 6 +- .../v1beta1/MockBigQueryStorageImpl.java | 58 +-- .../v1beta2/BaseBigQueryReadClientTest.java | 157 ++----- .../v1beta2/BigQueryWriteClientTest.java | 298 +++--------- .../storage/v1beta2/MockBigQueryRead.java | 6 +- .../storage/v1beta2/MockBigQueryReadImpl.java | 18 +- .../storage/v1beta2/MockBigQueryWrite.java | 6 +- .../v1beta2/MockBigQueryWriteImpl.java | 30 +- .../bigquery/storage/v1/ProjectName.java | 86 ++-- .../bigquery/storage/v1/ReadSessionName.java | 210 +++++++++ .../bigquery/storage/v1/ReadStreamName.java | 125 +++-- .../cloud/bigquery/storage/v1/TableName.java | 204 ++++++++ .../bigquery/storage/v1alpha2/TableName.java | 113 ++--- .../storage/v1alpha2/WriteStreamName.java | 125 +++-- .../bigquery/storage/v1beta1/ProjectName.java | 86 ++-- .../storage/v1beta1/ReadSessionName.java | 210 +++++++++ .../bigquery/storage/v1beta1/StreamName.java | 210 +++++++++ .../bigquery/storage/v1beta2/ProjectName.java | 86 ++-- .../storage/v1beta2/ReadSessionName.java | 210 +++++++++ .../storage/v1beta2/ReadStreamName.java | 125 +++-- .../bigquery/storage/v1beta2/TableName.java | 113 ++--- .../storage/v1beta2/WriteStreamName.java | 125 +++-- synth.metadata | 21 +- 64 files changed, 4170 insertions(+), 2676 deletions(-) create mode 100644 proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java create mode 100644 proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java create mode 100644 proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java create mode 100644 
proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java create mode 100644 proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java index cce27f0e15..3c36401335 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -26,7 +25,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND SERVICE /** * Service Description: BigQuery Read API. * @@ -35,7 +34,18 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

Note: close() needs to be called on the BaseBigQueryReadClient object to clean up resources + *

+ * 
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   ReadSession readSession = ReadSession.newBuilder().build();
+ *   int maxStreamCount = 0;
+ *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+ * }
+ * 
+ * 
+ * + *

Note: close() needs to be called on the baseBigQueryReadClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -64,28 +74,30 @@ * *

To customize credentials: * - *

{@code
+ * 
+ * 
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * }
+ * + *
* - *

To customize the endpoint: + * To customize the endpoint: * - *

{@code
+ * 
+ * 
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * }
- * - *

Please refer to the GitHub repository's samples for more quickstart code snippets. + * + *

*/ -@BetaApi @Generated("by gapic-generator") +@BetaApi public class BaseBigQueryReadClient implements BackgroundResource { private final BaseBigQueryReadSettings settings; private final BigQueryReadStub stub; @@ -106,7 +118,7 @@ public static final BaseBigQueryReadClient create(BaseBigQueryReadSettings setti /** * Constructs an instance of BaseBigQueryReadClient, using the given stub for making calls. This - * is for advanced usage - prefer using create(BaseBigQueryReadSettings). + * is for advanced usage - prefer to use BaseBigQueryReadSettings}. */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryReadClient create(BigQueryReadStub stub) { @@ -138,7 +150,7 @@ public BigQueryReadStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -157,14 +169,25 @@ public BigQueryReadStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   int maxStreamCount = 0;
+   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+   * }
+   * 
+ * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. - * @param read_session Required. Session to be created. - * @param max_stream_count Max initial number of streams. If unset or zero, the server will - * provide a value of streams so as to produce reasonable throughput. Must be non-negative. - * The number of streams may be lower than the requested number, depending on the amount - * parallelism that is reasonable for the table. Error will be returned if the max count is - * greater than the current system max limit of 1,000. + * @param readSession Required. Session to be created. + * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide + * a value of streams so as to produce reasonable throughput. Must be non-negative. The number + * of streams may be lower than the requested number, depending on the amount parallelism that + * is reasonable for the table. Error will be returned if the max count is greater than the + * current system max limit of 1,000. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -179,7 +202,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -198,14 +221,25 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   int maxStreamCount = 0;
+   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent.toString(), readSession, maxStreamCount);
+   * }
+   * 
+ * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. - * @param read_session Required. Session to be created. - * @param max_stream_count Max initial number of streams. If unset or zero, the server will - * provide a value of streams so as to produce reasonable throughput. Must be non-negative. - * The number of streams may be lower than the requested number, depending on the amount - * parallelism that is reasonable for the table. Error will be returned if the max count is - * greater than the current system max limit of 1,000. + * @param readSession Required. Session to be created. + * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide + * a value of streams so as to produce reasonable throughput. Must be non-negative. The number + * of streams may be lower than the requested number, depending on the amount parallelism that + * is reasonable for the table. Error will be returned if the max count is greater than the + * current system max limit of 1,000. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -220,7 +254,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -239,6 +273,20 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .setReadSession(readSession)
+   *     .build();
+   *   ReadSession response = baseBigQueryReadClient.createReadSession(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -246,7 +294,7 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -266,12 +314,26 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .setReadSession(readSession)
+   *     .build();
+   *   ApiFuture<ReadSession> future = baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
+   *   // Do something
+   *   ReadSession response = future.get();
+   * }
+   * 
*/ public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Reads rows from the stream in the format prescribed by the ReadSession. Each response contains * one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to @@ -281,12 +343,26 @@ public final UnaryCallable createReadSess * stream. * *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ReadStreamName readStream = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
+   *     .setReadStream(readStream.toString())
+   *     .build();
+   *
+   *   ServerStream<ReadRowsResponse> stream = baseBigQueryReadClient.readRowsCallable().call(request);
+   *   for (ReadRowsResponse response : stream) {
+   *     // Do something when receive a response
+   *   }
+   * }
+   * 
*/ public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -299,6 +375,18 @@ public final ServerStreamingCallable readRows * original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read * to completion. * + *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -306,7 +394,7 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -320,6 +408,18 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ * to completion. * *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
+   *   // Do something
+   *   SplitReadStreamResponse response = future.get();
+   * }
+   * 
*/ public final UnaryCallable splitReadStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java index d9669ef04a..7dfff71a93 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1; import com.google.api.core.ApiFunction; @@ -32,7 +31,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BaseBigQueryReadClient}. * @@ -50,24 +49,23 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

{@code
+ * 
+ * 
  * BaseBigQueryReadSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BaseBigQueryReadSettings.newBuilder();
  * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder
- *             .createReadSessionSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BaseBigQueryReadSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * }
+ * + *
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi public class BaseBigQueryReadSettings extends ClientSettings { - /** Returns the object with the settings used for calls to createReadSession. */ public UnaryCallSettings createReadSessionSettings() { return ((BigQueryReadStubSettings) getStubSettings()).createReadSessionSettings(); @@ -144,15 +142,18 @@ protected BaseBigQueryReadSettings(Builder settingsBuilder) throws IOException { /** Builder for BaseBigQueryReadSettings. */ public static class Builder extends ClientSettings.Builder { - protected Builder() throws IOException { - this(((ClientContext) null)); + this((ClientContext) null); } protected Builder(ClientContext clientContext) { super(BigQueryReadStubSettings.newBuilder(clientContext)); } + private static Builder createDefault() { + return new Builder(BigQueryReadStubSettings.newBuilder()); + } + protected Builder(BaseBigQueryReadSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -161,15 +162,11 @@ protected Builder(BigQueryReadStubSettings.Builder stubSettings) { super(stubSettings); } - private static Builder createDefault() { - return new Builder(BigQueryReadStubSettings.newBuilder()); - } - public BigQueryReadStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryReadStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception'. + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java index b6a07a3c4a..a29e6a13d4 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,30 @@ */ /** - * The interfaces provided are listed below, along with usage samples. + * A client to BigQuery Storage API. * - *

======================= BigQueryReadClient ======================= + *

The interfaces provided are listed below, along with usage samples. + * + *

====================== BaseBigQueryReadClient ====================== * *

Service Description: BigQuery Read API. * *

The Read API can be used to read data from BigQuery. * - *

Sample for BigQueryReadClient: + *

Sample for BaseBigQueryReadClient: + * + *

+ * 
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   ReadSession readSession = ReadSession.newBuilder().build();
+ *   int maxStreamCount = 0;
+ *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+ * }
+ * 
+ * 
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") package com.google.cloud.bigquery.storage.v1; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java index 85cb247aaf..01bff92268 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -27,13 +27,14 @@ import com.google.cloud.bigquery.storage.v1.SplitReadStreamResponse; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * Base stub class for the BigQueryRead service API. + * Base stub class for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryReadStub implements BackgroundResource { public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java index 643f8c3d21..1b657327c5 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1.stub; import com.google.api.core.ApiFunction; @@ -47,7 +46,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BigQueryReadStub}. * @@ -65,23 +64,22 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

{@code
- * BigQueryReadStubSettings.Builder bigQueryReadSettingsBuilder =
+ * 
+ * 
+ * BigQueryReadStubSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BigQueryReadStubSettings.newBuilder();
- * bigQueryReadSettingsBuilder
+ * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         bigQueryReadSettingsBuilder
- *             .createReadSessionSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryReadStubSettings bigQueryReadSettings = bigQueryReadSettingsBuilder.build();
- * }
+ * BigQueryReadStubSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build(); + * + *
*/ +@Generated("by gapic-generator") @BetaApi -@Generated("by gapic-generator-java") public class BigQueryReadStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -118,10 +116,10 @@ public BigQueryReadStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryReadStub.create(this); + } else { + throw new UnsupportedOperationException( + "Transport not supported: " + getTransportChannelProvider().getTransportName()); } - throw new UnsupportedOperationException( - String.format( - "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -189,12 +187,14 @@ protected BigQueryReadStubSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryReadStubSettings. */ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; + private final UnaryCallSettings.Builder createReadSessionSettings; private final ServerStreamingCallSettings.Builder readRowsSettings; private final UnaryCallSettings.Builder splitReadStreamSettings; + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -202,18 +202,19 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_0_codes", + "retry_policy_1_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", + "retry_policy_3_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, 
StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_2_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -232,7 +233,7 @@ public static class Builder extends StubSettings.Builder>of( createReadSessionSettings, splitReadStreamSettings); - initDefaults(this); - } - protected Builder(BigQueryReadStubSettings settings) { - super(settings); - - createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); - readRowsSettings = settings.readRowsSettings.toBuilder(); - splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createReadSessionSettings, splitReadStreamSettings); + initDefaults(this); } private static Builder createDefault() { - Builder builder = new Builder(((ClientContext) null)); - + Builder builder = new Builder((ClientContext) null); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); - return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder - .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder - .readRowsSettings() + .createReadSessionSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder - .splitReadStreamSettings() + .readRowsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + builder + .splitReadStreamSettings() + 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + return builder; } - // NEXT_MAJOR_VER: remove 'throws Exception'. + protected Builder(BigQueryReadStubSettings settings) { + super(settings); + + createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); + readRowsSettings = settings.readRowsSettings.toBuilder(); + splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createReadSessionSettings, splitReadStreamSettings); + } + + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java index 3da5e2a734..886b58e704 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,19 +31,18 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; -import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC callable factory implementation for the BigQueryRead service API. + * gRPC callable factory implementation for BigQuery Storage API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") +@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryReadCallableFactory implements GrpcStubCallableFactory { - @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -56,58 +55,61 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings callSettings, + PagedCallSettings pagedCallSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); + return GrpcCallableFactory.createPagedCallable( + grpcCallSettings, pagedCallSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings callSettings, + BatchingCallSettings batchingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, batchingCallSettings, clientContext); } + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings callSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings operationCallSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, callSettings, clientContext, operationsStub); + grpcCallSettings, operationCallSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings callSettings, + ServerStreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java index 6e887492d7..edb90c4e5a 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -31,7 +31,6 @@ import com.google.cloud.bigquery.storage.v1.SplitReadStreamRequest; import com.google.cloud.bigquery.storage.v1.SplitReadStreamResponse; import com.google.common.collect.ImmutableMap; -import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -39,14 +38,16 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC stub implementation for the BigQueryRead service API. + * gRPC stub implementation for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public class GrpcBigQueryReadStub extends BigQueryReadStub { + private static final MethodDescriptor createReadSessionMethodDescriptor = MethodDescriptor.newBuilder() @@ -56,7 +57,6 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) .build(); - private static final MethodDescriptor readRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -65,7 +65,6 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor splitReadStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -77,13 +76,13 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) .build(); + private final BackgroundResource backgroundResources; + private final UnaryCallable createReadSessionCallable; private final ServerStreamingCallable readRowsCallable; private final UnaryCallable splitReadStreamCallable; - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryReadStub create(BigQueryReadStubSettings settings) @@ -101,18 +100,27 @@ public static final GrpcBigQueryReadStub create( BigQueryReadStubSettings.newBuilder().build(), clientContext, callableFactory); } + /** + * Constructs an instance of GrpcBigQueryReadStub, using the given settings. 
This is protected so + * that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ protected GrpcBigQueryReadStub(BigQueryReadStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryReadCallableFactory()); } + /** + * Constructs an instance of GrpcBigQueryReadStub, using the given settings. This is protected so + * that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ protected GrpcBigQueryReadStub( BigQueryReadStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; - this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createReadSessionTransportSettings = GrpcCallSettings.newBuilder() @@ -168,12 +176,7 @@ public Map extract(SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - this.backgroundResources = - new BackgroundResourceAggregation(clientContext.getBackgroundResources()); - } - - public GrpcOperationsStub getOperationsStub() { - return operationsStub; + backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java index 6f23b528ca..edeb6e2800 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java @@ -5,7 +5,7 @@ * you may not use this file except in 
compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,20 +13,30 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStub; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStubSettings; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND SERVICE /** * Service Description: BigQuery Write API. * @@ -35,7 +45,17 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

Note: close() needs to be called on the BigQueryWriteClient object to clean up resources such + *

+ * 
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ *   WriteStream writeStream = WriteStream.newBuilder().build();
+ *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ * 
+ * 
+ * + *

Note: close() needs to be called on the bigQueryWriteClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * *

The surface of this class includes several types of Java methods for each of the API's @@ -63,26 +83,30 @@ * *

To customize credentials: * - *

{@code
+ * 
+ * 
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
- * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings);
- * }
+ * BigQueryWriteClient bigQueryWriteClient = + * BigQueryWriteClient.create(bigQueryWriteSettings); + * + *
* - *

To customize the endpoint: + * To customize the endpoint: * - *

{@code
+ * 
+ * 
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder().setEndpoint(myEndpoint).build();
- * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings);
- * }
- * - *

Please refer to the GitHub repository's samples for more quickstart code snippets. + * BigQueryWriteClient bigQueryWriteClient = + * BigQueryWriteClient.create(bigQueryWriteSettings); + * + *

*/ -@BetaApi @Generated("by gapic-generator") +@BetaApi public class BigQueryWriteClient implements BackgroundResource { private final BigQueryWriteSettings settings; private final BigQueryWriteStub stub; @@ -103,7 +127,7 @@ public static final BigQueryWriteClient create(BigQueryWriteSettings settings) /** * Constructs an instance of BigQueryWriteClient, using the given stub for making calls. This is - * for advanced usage - prefer using create(BigQueryWriteSettings). + * for advanced usage - prefer to use BigQueryWriteSettings}. */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BigQueryWriteClient create(BigQueryWriteStub stub) { @@ -135,66 +159,109 @@ public BigQueryWriteStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a write stream to the given table. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+   * }
+   * 
+ * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param write_stream Required. Stream to be created. + * @param writeStream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Stream.WriteStream createWriteStream( - TableName parent, Stream.WriteStream writeStream) { - Storage.CreateWriteStreamRequest request = - Storage.CreateWriteStreamRequest.newBuilder() + public final WriteStream createWriteStream(TableName parent, WriteStream writeStream) { + CreateWriteStreamRequest request = + CreateWriteStreamRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) .setWriteStream(writeStream) .build(); return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a write stream to the given table. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent.toString(), writeStream);
+   * }
+   * 
+ * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param write_stream Required. Stream to be created. + * @param writeStream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Stream.WriteStream createWriteStream(String parent, Stream.WriteStream writeStream) { - Storage.CreateWriteStreamRequest request = - Storage.CreateWriteStreamRequest.newBuilder() - .setParent(parent) - .setWriteStream(writeStream) - .build(); + public final WriteStream createWriteStream(String parent, WriteStream writeStream) { + CreateWriteStreamRequest request = + CreateWriteStreamRequest.newBuilder().setParent(parent).setWriteStream(writeStream).build(); return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a write stream to the given table. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .setWriteStream(writeStream)
+   *     .build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Stream.WriteStream createWriteStream(Storage.CreateWriteStreamRequest request) { + public final WriteStream createWriteStream(CreateWriteStreamRequest request) { return createWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a write stream to the given table. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .setWriteStream(writeStream)
+   *     .build();
+   *   ApiFuture<WriteStream> future = bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
+   *   // Do something
+   *   WriteStream response = future.get();
+   * }
+   * 
*/ - public final UnaryCallable - createWriteStreamCallable() { + public final UnaryCallable createWriteStreamCallable() { return stub.createWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Appends data to the given stream. * @@ -214,229 +281,396 @@ public final Stream.WriteStream createWriteStream(Storage.CreateWriteStreamReque * the stream is committed. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
+   *       bigQueryWriteClient.appendRowsCallable().call();
+   *
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   AppendRowsRequest request = AppendRowsRequest.newBuilder()
+   *     .setWriteStream(writeStream.toString())
+   *     .build();
+   *   bidiStream.send(request);
+   *   for (AppendRowsResponse response : bidiStream) {
+   *     // Do something when receive a response
+   *   }
+   * }
+   * 
*/ - public final BidiStreamingCallable - appendRowsCallable() { + public final BidiStreamingCallable appendRowsCallable() { return stub.appendRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets a write stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(name);
+   * }
+   * 
+ * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Stream.WriteStream getWriteStream(WriteStreamName name) { - Storage.GetWriteStreamRequest request = - Storage.GetWriteStreamRequest.newBuilder() - .setName(name == null ? null : name.toString()) - .build(); + public final WriteStream getWriteStream(WriteStreamName name) { + GetWriteStreamRequest request = + GetWriteStreamRequest.newBuilder().setName(name == null ? null : name.toString()).build(); return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets a write stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(name.toString());
+   * }
+   * 
+ * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Stream.WriteStream getWriteStream(String name) { - Storage.GetWriteStreamRequest request = - Storage.GetWriteStreamRequest.newBuilder().setName(name).build(); + public final WriteStream getWriteStream(String name) { + GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder().setName(name).build(); return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets a write stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Stream.WriteStream getWriteStream(Storage.GetWriteStreamRequest request) { + public final WriteStream getWriteStream(GetWriteStreamRequest request) { return getWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets a write stream. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   ApiFuture<WriteStream> future = bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
+   *   // Do something
+   *   WriteStream response = future.get();
+   * }
+   * 
*/ - public final UnaryCallable - getWriteStreamCallable() { + public final UnaryCallable getWriteStreamCallable() { return stub.getWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finalize a write stream so that no new data can be appended to the stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
+   * }
+   * 
+ * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { - Storage.FinalizeWriteStreamRequest request = - Storage.FinalizeWriteStreamRequest.newBuilder() + public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { + FinalizeWriteStreamRequest request = + FinalizeWriteStreamRequest.newBuilder() .setName(name == null ? null : name.toString()) .build(); return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finalize a write stream so that no new data can be appended to the stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name.toString());
+   * }
+   * 
+ * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(String name) { - Storage.FinalizeWriteStreamRequest request = - Storage.FinalizeWriteStreamRequest.newBuilder().setName(name).build(); + public final FinalizeWriteStreamResponse finalizeWriteStream(String name) { + FinalizeWriteStreamRequest request = + FinalizeWriteStreamRequest.newBuilder().setName(name).build(); return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finalize a write stream so that no new data can be appended to the stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.FinalizeWriteStreamResponse finalizeWriteStream( - Storage.FinalizeWriteStreamRequest request) { + public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStreamRequest request) { return finalizeWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finalize a write stream so that no new data can be appended to the stream. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   ApiFuture<FinalizeWriteStreamResponse> future = bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
+   *   // Do something
+   *   FinalizeWriteStreamResponse response = future.get();
+   * }
+   * 
*/ - public final UnaryCallable< - Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> + public final UnaryCallable finalizeWriteStreamCallable() { return stub.finalizeWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent);
+   * }
+   * 
+ * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(TableName parent) { - Storage.BatchCommitWriteStreamsRequest request = - Storage.BatchCommitWriteStreamsRequest.newBuilder() + public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(TableName parent) { + BatchCommitWriteStreamsRequest request = + BatchCommitWriteStreamsRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) .build(); return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent.toString());
+   * }
+   * 
+ * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(String parent) { - Storage.BatchCommitWriteStreamsRequest request = - Storage.BatchCommitWriteStreamsRequest.newBuilder().setParent(parent).build(); + public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(String parent) { + BatchCommitWriteStreamsRequest request = + BatchCommitWriteStreamsRequest.newBuilder().setParent(parent).build(); return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   List<String> writeStreams = new ArrayList<>();
+   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .addAllWriteStreams(writeStreams)
+   *     .build();
+   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams( - Storage.BatchCommitWriteStreamsRequest request) { + public final BatchCommitWriteStreamsResponse batchCommitWriteStreams( + BatchCommitWriteStreamsRequest request) { return batchCommitWriteStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   List<String> writeStreams = new ArrayList<>();
+   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .addAllWriteStreams(writeStreams)
+   *     .build();
+   *   ApiFuture<BatchCommitWriteStreamsResponse> future = bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
+   *   // Do something
+   *   BatchCommitWriteStreamsResponse response = future.get();
+   * }
+   * 
*/ - public final UnaryCallable< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + public final UnaryCallable batchCommitWriteStreamsCallable() { return stub.batchCommitWriteStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * - * @param write_stream Required. The stream that is the target of the flush operation. + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
+   * }
+   * 
+ * + * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.FlushRowsResponse flushRows(WriteStreamName writeStream) { - Storage.FlushRowsRequest request = - Storage.FlushRowsRequest.newBuilder() + public final FlushRowsResponse flushRows(WriteStreamName writeStream) { + FlushRowsRequest request = + FlushRowsRequest.newBuilder() .setWriteStream(writeStream == null ? null : writeStream.toString()) .build(); return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * - * @param write_stream Required. The stream that is the target of the flush operation. + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream.toString());
+   * }
+   * 
+ * + * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.FlushRowsResponse flushRows(String writeStream) { - Storage.FlushRowsRequest request = - Storage.FlushRowsRequest.newBuilder().setWriteStream(writeStream).build(); + public final FlushRowsResponse flushRows(String writeStream) { + FlushRowsRequest request = FlushRowsRequest.newBuilder().setWriteStream(writeStream).build(); return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
+   *     .setWriteStream(writeStream.toString())
+   *     .build();
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.FlushRowsResponse flushRows(Storage.FlushRowsRequest request) { + public final FlushRowsResponse flushRows(FlushRowsRequest request) { return flushRowsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation @@ -444,9 +678,20 @@ public final Storage.FlushRowsResponse flushRows(Storage.FlushRowsRequest reques * the request. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
+   *     .setWriteStream(writeStream.toString())
+   *     .build();
+   *   ApiFuture<FlushRowsResponse> future = bigQueryWriteClient.flushRowsCallable().futureCall(request);
+   *   // Do something
+   *   FlushRowsResponse response = future.get();
+   * }
+   * 
*/ - public final UnaryCallable - flushRowsCallable() { + public final UnaryCallable flushRowsCallable() { return stub.flushRowsCallable(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java index 2bfe1ee7e0..a029c17d0e 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.ApiFunction; @@ -27,12 +26,23 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStubSettings; import java.io.IOException; import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BigQueryWriteClient}. * @@ -50,57 +60,52 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

{@code
- * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder = BigQueryWriteSettings.newBuilder();
+ * 
+ * 
+ * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder =
+ *     BigQueryWriteSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder
- *             .createWriteStreamSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * }
+ * + *
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi public class BigQueryWriteSettings extends ClientSettings { - /** Returns the object with the settings used for calls to createWriteStream. */ - public UnaryCallSettings - createWriteStreamSettings() { + public UnaryCallSettings createWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).createWriteStreamSettings(); } /** Returns the object with the settings used for calls to appendRows. */ - public StreamingCallSettings - appendRowsSettings() { + public StreamingCallSettings appendRowsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).appendRowsSettings(); } /** Returns the object with the settings used for calls to getWriteStream. */ - public UnaryCallSettings - getWriteStreamSettings() { + public UnaryCallSettings getWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).getWriteStreamSettings(); } /** Returns the object with the settings used for calls to finalizeWriteStream. */ - public UnaryCallSettings + public UnaryCallSettings finalizeWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).finalizeWriteStreamSettings(); } /** Returns the object with the settings used for calls to batchCommitWriteStreams. */ - public UnaryCallSettings< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + public UnaryCallSettings batchCommitWriteStreamsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).batchCommitWriteStreamsSettings(); } /** Returns the object with the settings used for calls to flushRows. */ - public UnaryCallSettings - flushRowsSettings() { + public UnaryCallSettings flushRowsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).flushRowsSettings(); } @@ -164,15 +169,18 @@ protected BigQueryWriteSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryWriteSettings. 
*/ public static class Builder extends ClientSettings.Builder { - protected Builder() throws IOException { - this(((ClientContext) null)); + this((ClientContext) null); } protected Builder(ClientContext clientContext) { super(BigQueryWriteStubSettings.newBuilder(clientContext)); } + private static Builder createDefault() { + return new Builder(BigQueryWriteStubSettings.newBuilder()); + } + protected Builder(BigQueryWriteSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -181,15 +189,11 @@ protected Builder(BigQueryWriteStubSettings.Builder stubSettings) { super(stubSettings); } - private static Builder createDefault() { - return new Builder(BigQueryWriteStubSettings.newBuilder()); - } - public BigQueryWriteStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryWriteStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception'. + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -203,40 +207,37 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createWriteStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createWriteStreamSettings() { return getStubSettingsBuilder().createWriteStreamSettings(); } /** Returns the builder for the settings used for calls to appendRows. */ - public StreamingCallSettings.Builder + public StreamingCallSettings.Builder appendRowsSettings() { return getStubSettingsBuilder().appendRowsSettings(); } /** Returns the builder for the settings used for calls to getWriteStream. */ - public UnaryCallSettings.Builder - getWriteStreamSettings() { + public UnaryCallSettings.Builder getWriteStreamSettings() { return getStubSettingsBuilder().getWriteStreamSettings(); } /** Returns the builder for the settings used for calls to finalizeWriteStream. 
*/ - public UnaryCallSettings.Builder< - Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> + public UnaryCallSettings.Builder finalizeWriteStreamSettings() { return getStubSettingsBuilder().finalizeWriteStreamSettings(); } /** Returns the builder for the settings used for calls to batchCommitWriteStreams. */ public UnaryCallSettings.Builder< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return getStubSettingsBuilder().batchCommitWriteStreamsSettings(); } /** Returns the builder for the settings used for calls to flushRows. */ - public UnaryCallSettings.Builder - flushRowsSettings() { + public UnaryCallSettings.Builder flushRowsSettings() { return getStubSettingsBuilder().flushRowsSettings(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java index 561987d3b3..d5a0a66695 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,29 @@ */ /** - * The interfaces provided are listed below, along with usage samples. + * A client to BigQuery Storage API. * - *

======================= BigQueryWriteClient ======================= + *

The interfaces provided are listed below, along with usage samples. + * + *

=================== BigQueryWriteClient =================== * *

Service Description: BigQuery Write API. * *

The Write API can be used to write data to BigQuery. * *

Sample for BigQueryWriteClient: + * + *

+ * 
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ *   WriteStream writeStream = WriteStream.newBuilder().build();
+ *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ * 
+ * 
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") package com.google.cloud.bigquery.storage.v1alpha2; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java index 27ef0b03d7..c86dcd8a28 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,51 +13,58 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1alpha2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1alpha2.Storage; -import com.google.cloud.bigquery.storage.v1alpha2.Stream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * Base stub class for the BigQueryWrite service API. + * Base stub class for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryWriteStub implements BackgroundResource { - public UnaryCallable - createWriteStreamCallable() { + public UnaryCallable createWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: createWriteStreamCallable()"); } - public BidiStreamingCallable - appendRowsCallable() { + public BidiStreamingCallable appendRowsCallable() { throw new UnsupportedOperationException("Not implemented: appendRowsCallable()"); } - public UnaryCallable getWriteStreamCallable() { + public UnaryCallable getWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: getWriteStreamCallable()"); } - public UnaryCallable + public UnaryCallable finalizeWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: finalizeWriteStreamCallable()"); } - public UnaryCallable< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + public UnaryCallable batchCommitWriteStreamsCallable() { throw new UnsupportedOperationException("Not implemented: batchCommitWriteStreamsCallable()"); } - public UnaryCallable flushRowsCallable() { + public UnaryCallable flushRowsCallable() { throw new UnsupportedOperationException("Not implemented: flushRowsCallable()"); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java index f1d4fc571d..09fd472e31 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java +++ 
b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1alpha2.stub; import com.google.api.core.ApiFunction; @@ -32,8 +31,17 @@ import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1alpha2.Storage; -import com.google.cloud.bigquery.storage.v1alpha2.Stream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; 
import com.google.common.collect.ImmutableSet; @@ -43,7 +51,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BigQueryWriteStub}. * @@ -61,23 +69,22 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

{@code
+ * 
+ * 
  * BigQueryWriteStubSettings.Builder bigQueryWriteSettingsBuilder =
  *     BigQueryWriteStubSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder
- *             .createWriteStreamSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteStubSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * }
+ * + *
*/ +@Generated("by gapic-generator") @BetaApi -@Generated("by gapic-generator-java") public class BigQueryWriteStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -87,55 +94,44 @@ public class BigQueryWriteStubSettings extends StubSettings - createWriteStreamSettings; - private final StreamingCallSettings - appendRowsSettings; - private final UnaryCallSettings - getWriteStreamSettings; - private final UnaryCallSettings< - Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> + private final UnaryCallSettings createWriteStreamSettings; + private final StreamingCallSettings appendRowsSettings; + private final UnaryCallSettings getWriteStreamSettings; + private final UnaryCallSettings finalizeWriteStreamSettings; - private final UnaryCallSettings< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + private final UnaryCallSettings batchCommitWriteStreamsSettings; - private final UnaryCallSettings - flushRowsSettings; + private final UnaryCallSettings flushRowsSettings; /** Returns the object with the settings used for calls to createWriteStream. */ - public UnaryCallSettings - createWriteStreamSettings() { + public UnaryCallSettings createWriteStreamSettings() { return createWriteStreamSettings; } /** Returns the object with the settings used for calls to appendRows. */ - public StreamingCallSettings - appendRowsSettings() { + public StreamingCallSettings appendRowsSettings() { return appendRowsSettings; } /** Returns the object with the settings used for calls to getWriteStream. */ - public UnaryCallSettings - getWriteStreamSettings() { + public UnaryCallSettings getWriteStreamSettings() { return getWriteStreamSettings; } /** Returns the object with the settings used for calls to finalizeWriteStream. 
*/ - public UnaryCallSettings + public UnaryCallSettings finalizeWriteStreamSettings() { return finalizeWriteStreamSettings; } /** Returns the object with the settings used for calls to batchCommitWriteStreams. */ - public UnaryCallSettings< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + public UnaryCallSettings batchCommitWriteStreamsSettings() { return batchCommitWriteStreamsSettings; } /** Returns the object with the settings used for calls to flushRows. */ - public UnaryCallSettings - flushRowsSettings() { + public UnaryCallSettings flushRowsSettings() { return flushRowsSettings; } @@ -145,10 +141,10 @@ public BigQueryWriteStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryWriteStub.create(this); + } else { + throw new UnsupportedOperationException( + "Transport not supported: " + getTransportChannelProvider().getTransportName()); } - throw new UnsupportedOperationException( - String.format( - "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -219,21 +215,20 @@ protected BigQueryWriteStubSettings(Builder settingsBuilder) throws IOException /** Builder for BigQueryWriteStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder + + private final UnaryCallSettings.Builder createWriteStreamSettings; - private final StreamingCallSettings.Builder< - Storage.AppendRowsRequest, Storage.AppendRowsResponse> + private final StreamingCallSettings.Builder appendRowsSettings; - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder getWriteStreamSettings; - private final UnaryCallSettings.Builder< - Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> + private final UnaryCallSettings.Builder finalizeWriteStreamSettings; private final UnaryCallSettings.Builder< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings; - private final UnaryCallSettings.Builder - flushRowsSettings; + private final UnaryCallSettings.Builder flushRowsSettings; + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -241,23 +236,23 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_0_codes", + "retry_policy_1_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_1_codes", + "retry_policy_3_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); definitions.put( "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", 
ImmutableSet.copyOf(Lists.newArrayList())); + StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -276,7 +271,7 @@ public static class Builder extends StubSettings.Builder>of( - createWriteStreamSettings, - getWriteStreamSettings, - finalizeWriteStreamSettings, - batchCommitWriteStreamsSettings, - flushRowsSettings); + initDefaults(this); } private static Builder createDefault() { - Builder builder = new Builder(((ClientContext) null)); - + Builder builder = new Builder((ClientContext) null); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); - return initDefaults(builder); } private static Builder initDefaults(Builder builder) { + builder .createWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .getWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); builder .finalizeWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); builder 
.batchCommitWriteStreamsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); builder .flushRowsSettings() @@ -387,7 +368,26 @@ private static Builder initDefaults(Builder builder) { return builder; } - // NEXT_MAJOR_VER: remove 'throws Exception'. + protected Builder(BigQueryWriteStubSettings settings) { + super(settings); + + createWriteStreamSettings = settings.createWriteStreamSettings.toBuilder(); + appendRowsSettings = settings.appendRowsSettings.toBuilder(); + getWriteStreamSettings = settings.getWriteStreamSettings.toBuilder(); + finalizeWriteStreamSettings = settings.finalizeWriteStreamSettings.toBuilder(); + batchCommitWriteStreamsSettings = settings.batchCommitWriteStreamsSettings.toBuilder(); + flushRowsSettings = settings.flushRowsSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createWriteStreamSettings, + getWriteStreamSettings, + finalizeWriteStreamSettings, + batchCommitWriteStreamsSettings, + flushRowsSettings); + } + + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -404,40 +404,37 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createWriteStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createWriteStreamSettings() { return createWriteStreamSettings; } /** Returns the builder for the settings used for calls to appendRows. */ - public StreamingCallSettings.Builder + public StreamingCallSettings.Builder appendRowsSettings() { return appendRowsSettings; } /** Returns the builder for the settings used for calls to getWriteStream. 
*/ - public UnaryCallSettings.Builder - getWriteStreamSettings() { + public UnaryCallSettings.Builder getWriteStreamSettings() { return getWriteStreamSettings; } /** Returns the builder for the settings used for calls to finalizeWriteStream. */ - public UnaryCallSettings.Builder< - Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> + public UnaryCallSettings.Builder finalizeWriteStreamSettings() { return finalizeWriteStreamSettings; } /** Returns the builder for the settings used for calls to batchCommitWriteStreams. */ public UnaryCallSettings.Builder< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return batchCommitWriteStreamsSettings; } /** Returns the builder for the settings used for calls to flushRows. */ - public UnaryCallSettings.Builder - flushRowsSettings() { + public UnaryCallSettings.Builder flushRowsSettings() { return flushRowsSettings; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java index f80038c9ae..e1e5621cdf 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1alpha2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,19 +31,18 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; -import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC callable factory implementation for the BigQueryWrite service API. + * gRPC callable factory implementation for BigQuery Storage API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") +@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryWriteCallableFactory implements GrpcStubCallableFactory { - @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -56,58 +55,61 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings callSettings, + PagedCallSettings pagedCallSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); + return GrpcCallableFactory.createPagedCallable( + grpcCallSettings, pagedCallSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings callSettings, + BatchingCallSettings batchingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, batchingCallSettings, clientContext); } + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings callSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings operationCallSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, callSettings, clientContext, operationsStub); + grpcCallSettings, operationCallSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings callSettings, + ServerStreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java index ccb0b89388..7729ba6f9b 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1alpha2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -24,10 +24,18 @@ import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1alpha2.Storage; -import com.google.cloud.bigquery.storage.v1alpha2.Stream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.common.collect.ImmutableMap; -import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -35,106 +43,88 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC stub implementation for the BigQueryWrite service API. + * gRPC stub implementation for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public class GrpcBigQueryWriteStub extends BigQueryWriteStub { - private static final MethodDescriptor + + private static final MethodDescriptor createWriteStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/CreateWriteStream") .setRequestMarshaller( - ProtoUtils.marshaller(Storage.CreateWriteStreamRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(Stream.WriteStream.getDefaultInstance())) + ProtoUtils.marshaller(CreateWriteStreamRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) .build(); - - private static final MethodDescriptor + private static final MethodDescriptor appendRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.BIDI_STREAMING) .setFullMethodName("google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/AppendRows") - .setRequestMarshaller( - ProtoUtils.marshaller(Storage.AppendRowsRequest.getDefaultInstance())) - .setResponseMarshaller( - ProtoUtils.marshaller(Storage.AppendRowsResponse.getDefaultInstance())) + .setRequestMarshaller(ProtoUtils.marshaller(AppendRowsRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(AppendRowsResponse.getDefaultInstance())) .build(); - - private static final MethodDescriptor + private static final MethodDescriptor getWriteStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/GetWriteStream") 
.setRequestMarshaller( - ProtoUtils.marshaller(Storage.GetWriteStreamRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(Stream.WriteStream.getDefaultInstance())) + ProtoUtils.marshaller(GetWriteStreamRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) .build(); - - private static final MethodDescriptor< - Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> + private static final MethodDescriptor finalizeWriteStreamMethodDescriptor = - MethodDescriptor - .newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/FinalizeWriteStream") .setRequestMarshaller( - ProtoUtils.marshaller(Storage.FinalizeWriteStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(FinalizeWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(Storage.FinalizeWriteStreamResponse.getDefaultInstance())) + ProtoUtils.marshaller(FinalizeWriteStreamResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> batchCommitWriteStreamsMethodDescriptor = MethodDescriptor - . 
- newBuilder() + .newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/BatchCommitWriteStreams") .setRequestMarshaller( - ProtoUtils.marshaller( - Storage.BatchCommitWriteStreamsRequest.getDefaultInstance())) + ProtoUtils.marshaller(BatchCommitWriteStreamsRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller( - Storage.BatchCommitWriteStreamsResponse.getDefaultInstance())) + ProtoUtils.marshaller(BatchCommitWriteStreamsResponse.getDefaultInstance())) .build(); - - private static final MethodDescriptor + private static final MethodDescriptor flushRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/FlushRows") - .setRequestMarshaller( - ProtoUtils.marshaller(Storage.FlushRowsRequest.getDefaultInstance())) - .setResponseMarshaller( - ProtoUtils.marshaller(Storage.FlushRowsResponse.getDefaultInstance())) + .setRequestMarshaller(ProtoUtils.marshaller(FlushRowsRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(FlushRowsResponse.getDefaultInstance())) .build(); - private final UnaryCallable - createWriteStreamCallable; - private final BidiStreamingCallable - appendRowsCallable; - private final UnaryCallable - getWriteStreamCallable; - private final UnaryCallable< - Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> + private final BackgroundResource backgroundResources; + + private final UnaryCallable createWriteStreamCallable; + private final BidiStreamingCallable appendRowsCallable; + private final UnaryCallable getWriteStreamCallable; + private final UnaryCallable finalizeWriteStreamCallable; - private final UnaryCallable< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + private final UnaryCallable batchCommitWriteStreamsCallable; - 
private final UnaryCallable - flushRowsCallable; + private final UnaryCallable flushRowsCallable; - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryWriteStub create(BigQueryWriteStubSettings settings) @@ -152,110 +142,100 @@ public static final GrpcBigQueryWriteStub create( BigQueryWriteStubSettings.newBuilder().build(), clientContext, callableFactory); } + /** + * Constructs an instance of GrpcBigQueryWriteStub, using the given settings. This is protected so + * that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ protected GrpcBigQueryWriteStub(BigQueryWriteStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryWriteCallableFactory()); } + /** + * Constructs an instance of GrpcBigQueryWriteStub, using the given settings. This is protected so + * that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. 
+ */ protected GrpcBigQueryWriteStub( BigQueryWriteStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; - this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); - GrpcCallSettings - createWriteStreamTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(createWriteStreamMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(Storage.CreateWriteStreamRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("parent", String.valueOf(request.getParent())); - return params.build(); - } - }) - .build(); - GrpcCallSettings - appendRowsTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(appendRowsMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(Storage.AppendRowsRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("write_stream", String.valueOf(request.getWriteStream())); - return params.build(); - } - }) - .build(); - GrpcCallSettings - getWriteStreamTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(getWriteStreamMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(Storage.GetWriteStreamRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("name", String.valueOf(request.getName())); - return params.build(); - } - }) - .build(); - GrpcCallSettings + GrpcCallSettings createWriteStreamTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createWriteStreamMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(CreateWriteStreamRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", 
String.valueOf(request.getParent())); + return params.build(); + } + }) + .build(); + GrpcCallSettings appendRowsTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(appendRowsMethodDescriptor) + .build(); + GrpcCallSettings getWriteStreamTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getWriteStreamMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(GetWriteStreamRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + } + }) + .build(); + GrpcCallSettings finalizeWriteStreamTransportSettings = - GrpcCallSettings - . - newBuilder() + GrpcCallSettings.newBuilder() .setMethodDescriptor(finalizeWriteStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract( - Storage.FinalizeWriteStreamRequest request) { + public Map extract(FinalizeWriteStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("name", String.valueOf(request.getName())); return params.build(); } }) .build(); - GrpcCallSettings< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + GrpcCallSettings batchCommitWriteStreamsTransportSettings = GrpcCallSettings - . 
- newBuilder() + .newBuilder() .setMethodDescriptor(batchCommitWriteStreamsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract( - Storage.BatchCommitWriteStreamsRequest request) { + public Map extract(BatchCommitWriteStreamsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("parent", String.valueOf(request.getParent())); return params.build(); } }) .build(); - GrpcCallSettings - flushRowsTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(flushRowsMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(Storage.FlushRowsRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("write_stream", String.valueOf(request.getWriteStream())); - return params.build(); - } - }) - .build(); + GrpcCallSettings flushRowsTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(flushRowsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(FlushRowsRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("write_stream", String.valueOf(request.getWriteStream())); + return params.build(); + } + }) + .build(); this.createWriteStreamCallable = callableFactory.createUnaryCallable( @@ -282,40 +262,32 @@ public Map extract(Storage.FlushRowsRequest request) { callableFactory.createUnaryCallable( flushRowsTransportSettings, settings.flushRowsSettings(), clientContext); - this.backgroundResources = - new BackgroundResourceAggregation(clientContext.getBackgroundResources()); - } - - public GrpcOperationsStub getOperationsStub() { - return operationsStub; + backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } - public UnaryCallable - createWriteStreamCallable() { + public UnaryCallable createWriteStreamCallable() { return 
createWriteStreamCallable; } - public BidiStreamingCallable - appendRowsCallable() { + public BidiStreamingCallable appendRowsCallable() { return appendRowsCallable; } - public UnaryCallable getWriteStreamCallable() { + public UnaryCallable getWriteStreamCallable() { return getWriteStreamCallable; } - public UnaryCallable + public UnaryCallable finalizeWriteStreamCallable() { return finalizeWriteStreamCallable; } - public UnaryCallable< - Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> + public UnaryCallable batchCommitWriteStreamsCallable() { return batchCommitWriteStreamsCallable; } - public UnaryCallable flushRowsCallable() { + public UnaryCallable flushRowsCallable() { return flushRowsCallable; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java index acafbdcbf0..8f337335b5 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,13 +13,23 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.Stream; +import com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStub; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStubSettings; import com.google.protobuf.Empty; @@ -27,7 +37,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND SERVICE /** * Service Description: BigQuery storage API. * @@ -36,7 +46,18 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

Note: close() needs to be called on the BaseBigQueryStorageClient object to clean up resources + *

+ * 
+ * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+ *   TableReference tableReference = TableReference.newBuilder().build();
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   int requestedStreams = 0;
+ *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
+ * }
+ * 
+ * 
+ * + *

Note: close() needs to be called on the baseBigQueryStorageClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -65,28 +86,30 @@ * *

To customize credentials: * - *

{@code
+ * 
+ * 
  * BaseBigQueryStorageSettings baseBigQueryStorageSettings =
  *     BaseBigQueryStorageSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryStorageClient baseBigQueryStorageClient =
  *     BaseBigQueryStorageClient.create(baseBigQueryStorageSettings);
- * }
+ * + *
* - *

To customize the endpoint: + * To customize the endpoint: * - *

{@code
+ * 
+ * 
  * BaseBigQueryStorageSettings baseBigQueryStorageSettings =
  *     BaseBigQueryStorageSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryStorageClient baseBigQueryStorageClient =
  *     BaseBigQueryStorageClient.create(baseBigQueryStorageSettings);
- * }
- * - *

Please refer to the GitHub repository's samples for more quickstart code snippets. + * + *

*/ -@BetaApi @Generated("by gapic-generator") +@BetaApi public class BaseBigQueryStorageClient implements BackgroundResource { private final BaseBigQueryStorageSettings settings; private final BigQueryStorageStub stub; @@ -107,7 +130,7 @@ public static final BaseBigQueryStorageClient create(BaseBigQueryStorageSettings /** * Constructs an instance of BaseBigQueryStorageClient, using the given stub for making calls. - * This is for advanced usage - prefer using create(BaseBigQueryStorageSettings). + * This is for advanced usage - prefer to use BaseBigQueryStorageSettings}. */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryStorageClient create(BigQueryStorageStub stub) { @@ -139,7 +162,7 @@ public BigQueryStorageStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -152,20 +175,31 @@ public BigQueryStorageStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - * @param table_reference Required. Reference to the table to read. + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   TableReference tableReference = TableReference.newBuilder().build();
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   int requestedStreams = 0;
+   *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
+   * }
+   * 
+ * + * @param tableReference Required. Reference to the table to read. * @param parent Required. String of the form `projects/{project_id}` indicating the project this * ReadSession is associated with. This is the project that will be billed for usage. - * @param requested_streams Initial number of streams. If unset or 0, we will provide a value of + * @param requestedStreams Initial number of streams. If unset or 0, we will provide a value of * streams so as to produce reasonable throughput. Must be non-negative. The number of streams * may be lower than the requested number, depending on the amount parallelism that is * reasonable for the table and the maximum amount of parallelism allowed by the system. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.ReadSession createReadSession( - TableReferenceProto.TableReference tableReference, ProjectName parent, int requestedStreams) { - Storage.CreateReadSessionRequest request = - Storage.CreateReadSessionRequest.newBuilder() + public final ReadSession createReadSession( + TableReference tableReference, ProjectName parent, int requestedStreams) { + CreateReadSessionRequest request = + CreateReadSessionRequest.newBuilder() .setTableReference(tableReference) .setParent(parent == null ? null : parent.toString()) .setRequestedStreams(requestedStreams) @@ -173,7 +207,7 @@ public final Storage.ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -186,20 +220,31 @@ public final Storage.ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - * @param table_reference Required. Reference to the table to read. + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   TableReference tableReference = TableReference.newBuilder().build();
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   int requestedStreams = 0;
+   *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent.toString(), requestedStreams);
+   * }
+   * 
+ * + * @param tableReference Required. Reference to the table to read. * @param parent Required. String of the form `projects/{project_id}` indicating the project this * ReadSession is associated with. This is the project that will be billed for usage. - * @param requested_streams Initial number of streams. If unset or 0, we will provide a value of + * @param requestedStreams Initial number of streams. If unset or 0, we will provide a value of * streams so as to produce reasonable throughput. Must be non-negative. The number of streams * may be lower than the requested number, depending on the amount parallelism that is * reasonable for the table and the maximum amount of parallelism allowed by the system. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.ReadSession createReadSession( - TableReferenceProto.TableReference tableReference, String parent, int requestedStreams) { - Storage.CreateReadSessionRequest request = - Storage.CreateReadSessionRequest.newBuilder() + public final ReadSession createReadSession( + TableReference tableReference, String parent, int requestedStreams) { + CreateReadSessionRequest request = + CreateReadSessionRequest.newBuilder() .setTableReference(tableReference) .setParent(parent) .setRequestedStreams(requestedStreams) @@ -207,7 +252,7 @@ public final Storage.ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -220,14 +265,28 @@ public final Storage.ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   TableReference tableReference = TableReference.newBuilder().build();
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+   *     .setTableReference(tableReference)
+   *     .setParent(parent.toString())
+   *     .build();
+   *   ReadSession response = baseBigQueryStorageClient.createReadSession(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.ReadSession createReadSession(Storage.CreateReadSessionRequest request) { + public final ReadSession createReadSession(CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -241,13 +300,26 @@ public final Storage.ReadSession createReadSession(Storage.CreateReadSessionRequ * clean-up by the caller. * *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   TableReference tableReference = TableReference.newBuilder().build();
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+   *     .setTableReference(tableReference)
+   *     .setParent(parent.toString())
+   *     .build();
+   *   ApiFuture<ReadSession> future = baseBigQueryStorageClient.createReadSessionCallable().futureCall(request);
+   *   // Do something
+   *   ReadSession response = future.get();
+   * }
+   * 
*/ - public final UnaryCallable - createReadSessionCallable() { + public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Reads rows from the table in the format prescribed by the read session. Each response contains * one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to @@ -259,61 +331,111 @@ public final Storage.ReadSession createReadSession(Storage.CreateReadSessionRequ * data. * *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   StreamPosition readPosition = StreamPosition.newBuilder().build();
+   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
+   *     .setReadPosition(readPosition)
+   *     .build();
+   *
+   *   ServerStream<ReadRowsResponse> stream = baseBigQueryStorageClient.readRowsCallable().call(request);
+   *   for (ReadRowsResponse response : stream) {
+   *     // Do something when receive a response
+   *   }
+   * }
+   * 
*/ - public final ServerStreamingCallable - readRowsCallable() { + public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   ReadSession session = ReadSession.newBuilder().build();
+   *   int requestedStreams = 0;
+   *   BatchCreateReadSessionStreamsResponse response = baseBigQueryStorageClient.batchCreateReadSessionStreams(session, requestedStreams);
+   * }
+   * 
+ * * @param session Required. Must be a non-expired session obtained from a call to * CreateReadSession. Only the name field needs to be set. - * @param requested_streams Required. Number of new streams requested. Must be positive. Number of + * @param requestedStreams Required. Number of new streams requested. Must be positive. Number of * added streams may be less than this, see CreateReadSessionRequest for more information. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( - Storage.ReadSession session, int requestedStreams) { - Storage.BatchCreateReadSessionStreamsRequest request = - Storage.BatchCreateReadSessionStreamsRequest.newBuilder() + public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( + ReadSession session, int requestedStreams) { + BatchCreateReadSessionStreamsRequest request = + BatchCreateReadSessionStreamsRequest.newBuilder() .setSession(session) .setRequestedStreams(requestedStreams) .build(); return batchCreateReadSessionStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   ReadSession session = ReadSession.newBuilder().build();
+   *   int requestedStreams = 0;
+   *   BatchCreateReadSessionStreamsRequest request = BatchCreateReadSessionStreamsRequest.newBuilder()
+   *     .setSession(session)
+   *     .setRequestedStreams(requestedStreams)
+   *     .build();
+   *   BatchCreateReadSessionStreamsResponse response = baseBigQueryStorageClient.batchCreateReadSessionStreams(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( - Storage.BatchCreateReadSessionStreamsRequest request) { + public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( + BatchCreateReadSessionStreamsRequest request) { return batchCreateReadSessionStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   ReadSession session = ReadSession.newBuilder().build();
+   *   int requestedStreams = 0;
+   *   BatchCreateReadSessionStreamsRequest request = BatchCreateReadSessionStreamsRequest.newBuilder()
+   *     .setSession(session)
+   *     .setRequestedStreams(requestedStreams)
+   *     .build();
+   *   ApiFuture<BatchCreateReadSessionStreamsResponse> future = baseBigQueryStorageClient.batchCreateReadSessionStreamsCallable().futureCall(request);
+   *   // Do something
+   *   BatchCreateReadSessionStreamsResponse response = future.get();
+   * }
+   * 
*/ public final UnaryCallable< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable() { return stub.batchCreateReadSessionStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -327,16 +449,24 @@ public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessio *

This method will return an error if there are no other live streams in the Session, or if * SplitReadStream() has been called on the given Stream. * + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Stream stream = Stream.newBuilder().build();
+   *   baseBigQueryStorageClient.finalizeStream(stream);
+   * }
+   * 
+ * * @param stream Required. Stream to finalize. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final void finalizeStream(Storage.Stream stream) { - Storage.FinalizeStreamRequest request = - Storage.FinalizeStreamRequest.newBuilder().setStream(stream).build(); + public final void finalizeStream(Stream stream) { + FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder().setStream(stream).build(); finalizeStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -350,14 +480,26 @@ public final void finalizeStream(Storage.Stream stream) { *

This method will return an error if there are no other live streams in the Session, or if * SplitReadStream() has been called on the given Stream. * + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Stream stream = Stream.newBuilder().build();
+   *   FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder()
+   *     .setStream(stream)
+   *     .build();
+   *   baseBigQueryStorageClient.finalizeStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final void finalizeStream(Storage.FinalizeStreamRequest request) { + public final void finalizeStream(FinalizeStreamRequest request) { finalizeStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -372,12 +514,24 @@ public final void finalizeStream(Storage.FinalizeStreamRequest request) { * SplitReadStream() has been called on the given Stream. * *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Stream stream = Stream.newBuilder().build();
+   *   FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder()
+   *     .setStream(stream)
+   *     .build();
+   *   ApiFuture<Void> future = baseBigQueryStorageClient.finalizeStreamCallable().futureCall(request);
+   *   // Do something
+   *   future.get();
+   * }
+   * 
*/ - public final UnaryCallable finalizeStreamCallable() { + public final UnaryCallable finalizeStreamCallable() { return stub.finalizeStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as @@ -391,16 +545,25 @@ public final UnaryCallable finalizeStreamC * *

This method is guaranteed to be idempotent. * - * @param original_stream Required. Stream to split. + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Stream originalStream = Stream.newBuilder().build();
+   *   SplitReadStreamResponse response = baseBigQueryStorageClient.splitReadStream(originalStream);
+   * }
+   * 
+ * + * @param originalStream Required. Stream to split. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.SplitReadStreamResponse splitReadStream(Storage.Stream originalStream) { - Storage.SplitReadStreamRequest request = - Storage.SplitReadStreamRequest.newBuilder().setOriginalStream(originalStream).build(); + public final SplitReadStreamResponse splitReadStream(Stream originalStream) { + SplitReadStreamRequest request = + SplitReadStreamRequest.newBuilder().setOriginalStream(originalStream).build(); return splitReadStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as @@ -414,15 +577,26 @@ public final Storage.SplitReadStreamResponse splitReadStream(Storage.Stream orig * *

This method is guaranteed to be idempotent. * + *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Stream originalStream = Stream.newBuilder().build();
+   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+   *     .setOriginalStream(originalStream)
+   *     .build();
+   *   SplitReadStreamResponse response = baseBigQueryStorageClient.splitReadStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final Storage.SplitReadStreamResponse splitReadStream( - Storage.SplitReadStreamRequest request) { + public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest request) { return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as @@ -437,8 +611,20 @@ public final Storage.SplitReadStreamResponse splitReadStream( *

This method is guaranteed to be idempotent. * *

Sample code: + * + *


+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Stream originalStream = Stream.newBuilder().build();
+   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+   *     .setOriginalStream(originalStream)
+   *     .build();
+   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryStorageClient.splitReadStreamCallable().futureCall(request);
+   *   // Do something
+   *   SplitReadStreamResponse response = future.get();
+   * }
+   * 
*/ - public final UnaryCallable + public final UnaryCallable splitReadStreamCallable() { return stub.splitReadStreamCallable(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java index 73b6bf5729..a1bb6b456e 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.ApiFunction; @@ -27,13 +26,22 @@ import com.google.api.gax.rpc.ServerStreamingCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStubSettings; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BaseBigQueryStorageClient}. * @@ -51,53 +59,48 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

{@code
+ * 
+ * 
  * BaseBigQueryStorageSettings.Builder baseBigQueryStorageSettingsBuilder =
  *     BaseBigQueryStorageSettings.newBuilder();
  * baseBigQueryStorageSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryStorageSettingsBuilder
- *             .createReadSessionSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         baseBigQueryStorageSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BaseBigQueryStorageSettings baseBigQueryStorageSettings =
- *     baseBigQueryStorageSettingsBuilder.build();
- * }
+ * BaseBigQueryStorageSettings baseBigQueryStorageSettings = baseBigQueryStorageSettingsBuilder.build(); + * + *
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi public class BaseBigQueryStorageSettings extends ClientSettings { - /** Returns the object with the settings used for calls to createReadSession. */ - public UnaryCallSettings - createReadSessionSettings() { + public UnaryCallSettings createReadSessionSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).createReadSessionSettings(); } /** Returns the object with the settings used for calls to readRows. */ - public ServerStreamingCallSettings - readRowsSettings() { + public ServerStreamingCallSettings readRowsSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).readRowsSettings(); } /** Returns the object with the settings used for calls to batchCreateReadSessionStreams. */ public UnaryCallSettings< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return ((BigQueryStorageStubSettings) getStubSettings()) .batchCreateReadSessionStreamsSettings(); } /** Returns the object with the settings used for calls to finalizeStream. */ - public UnaryCallSettings finalizeStreamSettings() { + public UnaryCallSettings finalizeStreamSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).finalizeStreamSettings(); } /** Returns the object with the settings used for calls to splitReadStream. */ - public UnaryCallSettings + public UnaryCallSettings splitReadStreamSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).splitReadStreamSettings(); } @@ -162,15 +165,18 @@ protected BaseBigQueryStorageSettings(Builder settingsBuilder) throws IOExceptio /** Builder for BaseBigQueryStorageSettings. 
*/ public static class Builder extends ClientSettings.Builder { - protected Builder() throws IOException { - this(((ClientContext) null)); + this((ClientContext) null); } protected Builder(ClientContext clientContext) { super(BigQueryStorageStubSettings.newBuilder(clientContext)); } + private static Builder createDefault() { + return new Builder(BigQueryStorageStubSettings.newBuilder()); + } + protected Builder(BaseBigQueryStorageSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -179,15 +185,11 @@ protected Builder(BigQueryStorageStubSettings.Builder stubSettings) { super(stubSettings); } - private static Builder createDefault() { - return new Builder(BigQueryStorageStubSettings.newBuilder()); - } - public BigQueryStorageStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryStorageStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception'. + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -201,34 +203,31 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createReadSession. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createReadSessionSettings() { return getStubSettingsBuilder().createReadSessionSettings(); } /** Returns the builder for the settings used for calls to readRows. */ - public ServerStreamingCallSettings.Builder + public ServerStreamingCallSettings.Builder readRowsSettings() { return getStubSettingsBuilder().readRowsSettings(); } /** Returns the builder for the settings used for calls to batchCreateReadSessionStreams. 
*/ public UnaryCallSettings.Builder< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return getStubSettingsBuilder().batchCreateReadSessionStreamsSettings(); } /** Returns the builder for the settings used for calls to finalizeStream. */ - public UnaryCallSettings.Builder - finalizeStreamSettings() { + public UnaryCallSettings.Builder finalizeStreamSettings() { return getStubSettingsBuilder().finalizeStreamSettings(); } /** Returns the builder for the settings used for calls to splitReadStream. */ - public UnaryCallSettings.Builder< - Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> + public UnaryCallSettings.Builder splitReadStreamSettings() { return getStubSettingsBuilder().splitReadStreamSettings(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java index eff4878eee..5c0d3b601e 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,30 @@ */ /** - * The interfaces provided are listed below, along with usage samples. + * A client to BigQuery Storage API. * - *

======================= BigQueryStorageClient ======================= + *

The interfaces provided are listed below, along with usage samples. + * + *

========================= BaseBigQueryStorageClient ========================= * *

Service Description: BigQuery storage API. * *

The BigQuery storage API can be used to read data stored in BigQuery. * - *

Sample for BigQueryStorageClient: + *

Sample for BaseBigQueryStorageClient: + * + *

+ * 
+ * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+ *   TableReference tableReference = TableReference.newBuilder().build();
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   int requestedStreams = 0;
+ *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
+ * }
+ * 
+ * 
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") package com.google.cloud.bigquery.storage.v1beta1; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java index d7f64bde10..2d806771cb 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,49 +13,53 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1beta1.Storage; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * Base stub class for the BigQueryStorage service API. + * Base stub class for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryStorageStub implements BackgroundResource { - public UnaryCallable - createReadSessionCallable() { + public UnaryCallable createReadSessionCallable() { throw new UnsupportedOperationException("Not implemented: createReadSessionCallable()"); } - public ServerStreamingCallable - readRowsCallable() { + public ServerStreamingCallable readRowsCallable() { throw new UnsupportedOperationException("Not implemented: readRowsCallable()"); } - public UnaryCallable< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + public UnaryCallable batchCreateReadSessionStreamsCallable() { throw new UnsupportedOperationException( "Not implemented: batchCreateReadSessionStreamsCallable()"); } - public UnaryCallable finalizeStreamCallable() { + public UnaryCallable finalizeStreamCallable() { throw new UnsupportedOperationException("Not implemented: finalizeStreamCallable()"); } - public UnaryCallable - splitReadStreamCallable() { + public UnaryCallable splitReadStreamCallable() { throw new UnsupportedOperationException("Not implemented: splitReadStreamCallable()"); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java index 5225c25b98..1cf3ac0ccc 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta1.stub; import com.google.api.core.ApiFunction; @@ -32,7 +31,15 @@ import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1beta1.Storage; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -43,7 +50,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BigQueryStorageStub}. * @@ -61,23 +68,22 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

{@code
- * BigQueryStorageStubSettings.Builder bigQueryStorageSettingsBuilder =
+ * 
+ * 
+ * BigQueryStorageStubSettings.Builder baseBigQueryStorageSettingsBuilder =
  *     BigQueryStorageStubSettings.newBuilder();
- * bigQueryStorageSettingsBuilder
+ * baseBigQueryStorageSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         bigQueryStorageSettingsBuilder
- *             .createReadSessionSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         baseBigQueryStorageSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryStorageStubSettings bigQueryStorageSettings = bigQueryStorageSettingsBuilder.build();
- * }
+ * BigQueryStorageStubSettings baseBigQueryStorageSettings = baseBigQueryStorageSettingsBuilder.build(); + * + *
*/ +@Generated("by gapic-generator") @BetaApi -@Generated("by gapic-generator-java") public class BigQueryStorageStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -87,45 +93,39 @@ public class BigQueryStorageStubSettings extends StubSettings - createReadSessionSettings; - private final ServerStreamingCallSettings - readRowsSettings; + private final UnaryCallSettings createReadSessionSettings; + private final ServerStreamingCallSettings readRowsSettings; private final UnaryCallSettings< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings; - private final UnaryCallSettings finalizeStreamSettings; - private final UnaryCallSettings + private final UnaryCallSettings finalizeStreamSettings; + private final UnaryCallSettings splitReadStreamSettings; /** Returns the object with the settings used for calls to createReadSession. */ - public UnaryCallSettings - createReadSessionSettings() { + public UnaryCallSettings createReadSessionSettings() { return createReadSessionSettings; } /** Returns the object with the settings used for calls to readRows. */ - public ServerStreamingCallSettings - readRowsSettings() { + public ServerStreamingCallSettings readRowsSettings() { return readRowsSettings; } /** Returns the object with the settings used for calls to batchCreateReadSessionStreams. */ public UnaryCallSettings< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return batchCreateReadSessionStreamsSettings; } /** Returns the object with the settings used for calls to finalizeStream. 
*/ - public UnaryCallSettings finalizeStreamSettings() { + public UnaryCallSettings finalizeStreamSettings() { return finalizeStreamSettings; } /** Returns the object with the settings used for calls to splitReadStream. */ - public UnaryCallSettings + public UnaryCallSettings splitReadStreamSettings() { return splitReadStreamSettings; } @@ -136,10 +136,10 @@ public BigQueryStorageStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryStorageStub.create(this); + } else { + throw new UnsupportedOperationException( + "Transport not supported: " + getTransportChannelProvider().getTransportName()); } - throw new UnsupportedOperationException( - String.format( - "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -210,20 +210,18 @@ protected BigQueryStorageStubSettings(Builder settingsBuilder) throws IOExceptio /** Builder for BigQueryStorageStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder + + private final UnaryCallSettings.Builder createReadSessionSettings; - private final ServerStreamingCallSettings.Builder< - Storage.ReadRowsRequest, Storage.ReadRowsResponse> + private final ServerStreamingCallSettings.Builder readRowsSettings; private final UnaryCallSettings.Builder< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings; - private final UnaryCallSettings.Builder - finalizeStreamSettings; - private final UnaryCallSettings.Builder< - Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> + private final UnaryCallSettings.Builder finalizeStreamSettings; + private final UnaryCallSettings.Builder splitReadStreamSettings; + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -231,18 +229,19 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_0_codes", + "retry_policy_1_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", + "retry_policy_3_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_2_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -261,7 +260,7 @@ public static class Builder extends StubSettings.Builder>of( - createReadSessionSettings, - 
batchCreateReadSessionStreamsSettings, - finalizeStreamSettings, - splitReadStreamSettings); + initDefaults(this); } private static Builder createDefault() { - Builder builder = new Builder(((ClientContext) null)); - + Builder builder = new Builder((ClientContext) null); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); - return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder - .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder - .readRowsSettings() + .createReadSessionSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder - .batchCreateReadSessionStreamsSettings() + .readRowsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + builder + .batchCreateReadSessionStreamsSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + builder .finalizeStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); builder .splitReadStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); return builder; } - // NEXT_MAJOR_VER: remove 'throws Exception'. + protected Builder(BigQueryStorageStubSettings settings) { + super(settings); + + createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); + readRowsSettings = settings.readRowsSettings.toBuilder(); + batchCreateReadSessionStreamsSettings = + settings.batchCreateReadSessionStreamsSettings.toBuilder(); + finalizeStreamSettings = settings.finalizeStreamSettings.toBuilder(); + splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createReadSessionSettings, + batchCreateReadSessionStreamsSettings, + finalizeStreamSettings, + splitReadStreamSettings); + } + + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -384,34 +389,31 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createReadSession. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createReadSessionSettings() { return createReadSessionSettings; } /** Returns the builder for the settings used for calls to readRows. */ - public ServerStreamingCallSettings.Builder + public ServerStreamingCallSettings.Builder readRowsSettings() { return readRowsSettings; } /** Returns the builder for the settings used for calls to batchCreateReadSessionStreams. */ public UnaryCallSettings.Builder< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return batchCreateReadSessionStreamsSettings; } /** Returns the builder for the settings used for calls to finalizeStream. 
*/ - public UnaryCallSettings.Builder - finalizeStreamSettings() { + public UnaryCallSettings.Builder finalizeStreamSettings() { return finalizeStreamSettings; } /** Returns the builder for the settings used for calls to splitReadStream. */ - public UnaryCallSettings.Builder< - Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> + public UnaryCallSettings.Builder splitReadStreamSettings() { return splitReadStreamSettings; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java index 4cf9880b97..16a6b42c5f 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,19 +31,18 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; -import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC callable factory implementation for the BigQueryStorage service API. + * gRPC callable factory implementation for BigQuery Storage API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") +@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryStorageCallableFactory implements GrpcStubCallableFactory { - @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -56,58 +55,61 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings callSettings, + PagedCallSettings pagedCallSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); + return GrpcCallableFactory.createPagedCallable( + grpcCallSettings, pagedCallSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings callSettings, + BatchingCallSettings batchingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, batchingCallSettings, clientContext); } + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings callSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings operationCallSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, callSettings, clientContext, operationsStub); + grpcCallSettings, operationCallSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings callSettings, + ServerStreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java index 7bd61a7407..4ffa5f6309 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta1.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -24,9 +24,16 @@ import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1beta1.Storage; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.common.collect.ImmutableMap; -import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; @@ -35,95 +42,81 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC stub implementation for the BigQueryStorage service API. + * gRPC stub implementation for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public class GrpcBigQueryStorageStub extends BigQueryStorageStub { - private static final MethodDescriptor + + private static final MethodDescriptor createReadSessionMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/CreateReadSession") .setRequestMarshaller( - ProtoUtils.marshaller(Storage.CreateReadSessionRequest.getDefaultInstance())) - .setResponseMarshaller( - ProtoUtils.marshaller(Storage.ReadSession.getDefaultInstance())) + ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) .build(); - - private static final MethodDescriptor + private static final MethodDescriptor readRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.SERVER_STREAMING) .setFullMethodName("google.cloud.bigquery.storage.v1beta1.BigQueryStorage/ReadRows") - .setRequestMarshaller( - ProtoUtils.marshaller(Storage.ReadRowsRequest.getDefaultInstance())) - .setResponseMarshaller( - ProtoUtils.marshaller(Storage.ReadRowsResponse.getDefaultInstance())) + .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsMethodDescriptor = MethodDescriptor - . + . 
newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/BatchCreateReadSessionStreams") .setRequestMarshaller( - ProtoUtils.marshaller( - Storage.BatchCreateReadSessionStreamsRequest.getDefaultInstance())) + ProtoUtils.marshaller(BatchCreateReadSessionStreamsRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller( - Storage.BatchCreateReadSessionStreamsResponse.getDefaultInstance())) + ProtoUtils.marshaller(BatchCreateReadSessionStreamsResponse.getDefaultInstance())) .build(); - - private static final MethodDescriptor + private static final MethodDescriptor finalizeStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/FinalizeStream") .setRequestMarshaller( - ProtoUtils.marshaller(Storage.FinalizeStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(FinalizeStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - - private static final MethodDescriptor< - Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> + private static final MethodDescriptor splitReadStreamMethodDescriptor = - MethodDescriptor - .newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/SplitReadStream") .setRequestMarshaller( - ProtoUtils.marshaller(Storage.SplitReadStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(SplitReadStreamRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(Storage.SplitReadStreamResponse.getDefaultInstance())) + ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) .build(); - private final UnaryCallable - createReadSessionCallable; - private final ServerStreamingCallable - 
readRowsCallable; + private final BackgroundResource backgroundResources; + + private final UnaryCallable createReadSessionCallable; + private final ServerStreamingCallable readRowsCallable; private final UnaryCallable< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable; - private final UnaryCallable finalizeStreamCallable; - private final UnaryCallable + private final UnaryCallable finalizeStreamCallable; + private final UnaryCallable splitReadStreamCallable; - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryStorageStub create(BigQueryStorageStubSettings settings) @@ -143,42 +136,53 @@ public static final GrpcBigQueryStorageStub create( BigQueryStorageStubSettings.newBuilder().build(), clientContext, callableFactory); } + /** + * Constructs an instance of GrpcBigQueryStorageStub, using the given settings. This is protected + * so that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ protected GrpcBigQueryStorageStub( BigQueryStorageStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryStorageCallableFactory()); } + /** + * Constructs an instance of GrpcBigQueryStorageStub, using the given settings. This is protected + * so that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. 
+ */ protected GrpcBigQueryStorageStub( BigQueryStorageStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; - this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); - GrpcCallSettings - createReadSessionTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(createReadSessionMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(Storage.CreateReadSessionRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put( - "table_reference.project_id", - String.valueOf(request.getTableReference().getProjectId())); - return params.build(); - } - }) - .build(); - GrpcCallSettings readRowsTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings createReadSessionTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createReadSessionMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(CreateReadSessionRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put( + "table_reference.project_id", + String.valueOf(request.getTableReference().getProjectId())); + params.put( + "table_reference.dataset_id", + String.valueOf(request.getTableReference().getDatasetId())); + return params.build(); + } + }) + .build(); + GrpcCallSettings readRowsTransportSettings = + GrpcCallSettings.newBuilder() .setMethodDescriptor(readRowsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(Storage.ReadRowsRequest request) { + public Map extract(ReadRowsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put( "read_position.stream.name", @@ -187,48 +191,44 @@ public Map extract(Storage.ReadRowsRequest request) { } }) .build(); - GrpcCallSettings< - 
Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + GrpcCallSettings batchCreateReadSessionStreamsTransportSettings = GrpcCallSettings - . + . newBuilder() .setMethodDescriptor(batchCreateReadSessionStreamsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override public Map extract( - Storage.BatchCreateReadSessionStreamsRequest request) { + BatchCreateReadSessionStreamsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("session.name", String.valueOf(request.getSession().getName())); return params.build(); } }) .build(); - GrpcCallSettings finalizeStreamTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings finalizeStreamTransportSettings = + GrpcCallSettings.newBuilder() .setMethodDescriptor(finalizeStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(Storage.FinalizeStreamRequest request) { + public Map extract(FinalizeStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("stream.name", String.valueOf(request.getStream().getName())); return params.build(); } }) .build(); - GrpcCallSettings + GrpcCallSettings splitReadStreamTransportSettings = - GrpcCallSettings - .newBuilder() + GrpcCallSettings.newBuilder() .setMethodDescriptor(splitReadStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(Storage.SplitReadStreamRequest request) { + public Map extract(SplitReadStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put( "original_stream.name", @@ -258,37 +258,27 @@ public Map extract(Storage.SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - this.backgroundResources = - 
new BackgroundResourceAggregation(clientContext.getBackgroundResources()); - } - - public GrpcOperationsStub getOperationsStub() { - return operationsStub; + backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } - public UnaryCallable - createReadSessionCallable() { + public UnaryCallable createReadSessionCallable() { return createReadSessionCallable; } - public ServerStreamingCallable - readRowsCallable() { + public ServerStreamingCallable readRowsCallable() { return readRowsCallable; } - public UnaryCallable< - Storage.BatchCreateReadSessionStreamsRequest, - Storage.BatchCreateReadSessionStreamsResponse> + public UnaryCallable batchCreateReadSessionStreamsCallable() { return batchCreateReadSessionStreamsCallable; } - public UnaryCallable finalizeStreamCallable() { + public UnaryCallable finalizeStreamCallable() { return finalizeStreamCallable; } - public UnaryCallable - splitReadStreamCallable() { + public UnaryCallable splitReadStreamCallable() { return splitReadStreamCallable; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java index fc42c63e5f..12ac3ce6ca 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -26,7 +25,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND SERVICE /** * Service Description: BigQuery Read API. * @@ -38,7 +37,18 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

Note: close() needs to be called on the BaseBigQueryReadClient object to clean up resources + *

+ * 
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   ReadSession readSession = ReadSession.newBuilder().build();
+ *   int maxStreamCount = 0;
+ *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+ * }
+ * 
+ * 
+ * + *

Note: close() needs to be called on the baseBigQueryReadClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -67,28 +77,30 @@ * *

To customize credentials: * - *

{@code
+ * 
+ * 
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * }
+ * + *
* - *

To customize the endpoint: + * To customize the endpoint: * - *

{@code
+ * 
+ * 
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * }
- * - *

Please refer to the GitHub repository's samples for more quickstart code snippets. + * + *

*/ -@BetaApi @Generated("by gapic-generator") +@BetaApi public class BaseBigQueryReadClient implements BackgroundResource { private final BaseBigQueryReadSettings settings; private final BigQueryReadStub stub; @@ -109,7 +121,7 @@ public static final BaseBigQueryReadClient create(BaseBigQueryReadSettings setti /** * Constructs an instance of BaseBigQueryReadClient, using the given stub for making calls. This - * is for advanced usage - prefer using create(BaseBigQueryReadSettings). + * is for advanced usage - prefer to use BaseBigQueryReadSettings}. */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryReadClient create(BigQueryReadStub stub) { @@ -141,7 +153,7 @@ public BigQueryReadStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -160,14 +172,25 @@ public BigQueryReadStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   int maxStreamCount = 0;
+   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+   * }
+   * 
+ * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. - * @param read_session Required. Session to be created. - * @param max_stream_count Max initial number of streams. If unset or zero, the server will - * provide a value of streams so as to produce reasonable throughput. Must be non-negative. - * The number of streams may be lower than the requested number, depending on the amount - * parallelism that is reasonable for the table. Error will be returned if the max count is - * greater than the current system max limit of 1,000. + * @param readSession Required. Session to be created. + * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide + * a value of streams so as to produce reasonable throughput. Must be non-negative. The number + * of streams may be lower than the requested number, depending on the amount parallelism that + * is reasonable for the table. Error will be returned if the max count is greater than the + * current system max limit of 1,000. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -182,7 +205,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -201,14 +224,25 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   int maxStreamCount = 0;
+   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent.toString(), readSession, maxStreamCount);
+   * }
+   * 
+ * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. - * @param read_session Required. Session to be created. - * @param max_stream_count Max initial number of streams. If unset or zero, the server will - * provide a value of streams so as to produce reasonable throughput. Must be non-negative. - * The number of streams may be lower than the requested number, depending on the amount - * parallelism that is reasonable for the table. Error will be returned if the max count is - * greater than the current system max limit of 1,000. + * @param readSession Required. Session to be created. + * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide + * a value of streams so as to produce reasonable throughput. Must be non-negative. The number + * of streams may be lower than the requested number, depending on the amount parallelism that + * is reasonable for the table. Error will be returned if the max count is greater than the + * current system max limit of 1,000. *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -223,7 +257,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -242,6 +276,20 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .setReadSession(readSession)
+   *     .build();
+   *   ReadSession response = baseBigQueryReadClient.createReadSession(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -249,7 +297,7 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -269,12 +317,26 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .setReadSession(readSession)
+   *     .build();
+   *   ApiFuture<ReadSession> future = baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
+   *   // Do something
+   *   ReadSession response = future.get();
+   * }
+   * 
*/ public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Reads rows from the stream in the format prescribed by the ReadSession. Each response contains * one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to @@ -284,12 +346,26 @@ public final UnaryCallable createReadSess * stream. * *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ReadStreamName readStream = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
+   *     .setReadStream(readStream.toString())
+   *     .build();
+   *
+   *   ServerStream<ReadRowsResponse> stream = baseBigQueryReadClient.readRowsCallable().call(request);
+   *   for (ReadRowsResponse response : stream) {
+   *     // Do something when receive a response
+   *   }
+   * }
+   * 
*/ public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -302,6 +378,18 @@ public final ServerStreamingCallable readRows * original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read * to completion. * + *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -309,7 +397,7 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -323,6 +411,18 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ * to completion. * *

Sample code: + * + *


+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
+   *   // Do something
+   *   SplitReadStreamResponse response = future.get();
+   * }
+   * 
*/ public final UnaryCallable splitReadStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java index 464224d2d1..6570a55fc8 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.ApiFunction; @@ -32,7 +31,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BaseBigQueryReadClient}. * @@ -50,24 +49,23 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

{@code
+ * 
+ * 
  * BaseBigQueryReadSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BaseBigQueryReadSettings.newBuilder();
  * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder
- *             .createReadSessionSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BaseBigQueryReadSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * }
+ * + *
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi public class BaseBigQueryReadSettings extends ClientSettings { - /** Returns the object with the settings used for calls to createReadSession. */ public UnaryCallSettings createReadSessionSettings() { return ((BigQueryReadStubSettings) getStubSettings()).createReadSessionSettings(); @@ -144,15 +142,18 @@ protected BaseBigQueryReadSettings(Builder settingsBuilder) throws IOException { /** Builder for BaseBigQueryReadSettings. */ public static class Builder extends ClientSettings.Builder { - protected Builder() throws IOException { - this(((ClientContext) null)); + this((ClientContext) null); } protected Builder(ClientContext clientContext) { super(BigQueryReadStubSettings.newBuilder(clientContext)); } + private static Builder createDefault() { + return new Builder(BigQueryReadStubSettings.newBuilder()); + } + protected Builder(BaseBigQueryReadSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -161,15 +162,11 @@ protected Builder(BigQueryReadStubSettings.Builder stubSettings) { super(stubSettings); } - private static Builder createDefault() { - return new Builder(BigQueryReadStubSettings.newBuilder()); - } - public BigQueryReadStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryReadStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception'. + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java index a7c464f995..e902043f97 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -26,7 +25,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND SERVICE /** * Service Description: BigQuery Write API. * @@ -35,7 +34,17 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

Note: close() needs to be called on the BigQueryWriteClient object to clean up resources such + *

+ * 
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ *   WriteStream writeStream = WriteStream.newBuilder().build();
+ *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ * 
+ * 
+ * + *

Note: close() needs to be called on the bigQueryWriteClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * *

The surface of this class includes several types of Java methods for each of the API's @@ -63,26 +72,30 @@ * *

To customize credentials: * - *

{@code
+ * 
+ * 
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
- * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings);
- * }
+ * BigQueryWriteClient bigQueryWriteClient = + * BigQueryWriteClient.create(bigQueryWriteSettings); + * + *
* - *

To customize the endpoint: + * To customize the endpoint: * - *

{@code
+ * 
+ * 
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder().setEndpoint(myEndpoint).build();
- * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings);
- * }
- * - *

Please refer to the GitHub repository's samples for more quickstart code snippets. + * BigQueryWriteClient bigQueryWriteClient = + * BigQueryWriteClient.create(bigQueryWriteSettings); + * + *

*/ -@BetaApi @Generated("by gapic-generator") +@BetaApi public class BigQueryWriteClient implements BackgroundResource { private final BigQueryWriteSettings settings; private final BigQueryWriteStub stub; @@ -103,7 +116,7 @@ public static final BigQueryWriteClient create(BigQueryWriteSettings settings) /** * Constructs an instance of BigQueryWriteClient, using the given stub for making calls. This is - * for advanced usage - prefer using create(BigQueryWriteSettings). + * for advanced usage - prefer to use BigQueryWriteSettings}. */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BigQueryWriteClient create(BigQueryWriteStub stub) { @@ -135,7 +148,7 @@ public BigQueryWriteStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -143,9 +156,19 @@ public BigQueryWriteStub getStub() { * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+   * }
+   * 
+ * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param write_stream Required. Stream to be created. + * @param writeStream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final WriteStream createWriteStream(TableName parent, WriteStream writeStream) { @@ -157,7 +180,7 @@ public final WriteStream createWriteStream(TableName parent, WriteStream writeSt return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -165,9 +188,19 @@ public final WriteStream createWriteStream(TableName parent, WriteStream writeSt * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent.toString(), writeStream);
+   * }
+   * 
+ * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. - * @param write_stream Required. Stream to be created. + * @param writeStream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final WriteStream createWriteStream(String parent, WriteStream writeStream) { @@ -176,7 +209,7 @@ public final WriteStream createWriteStream(String parent, WriteStream writeStrea return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -184,6 +217,20 @@ public final WriteStream createWriteStream(String parent, WriteStream writeStrea * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .setWriteStream(writeStream)
+   *     .build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -191,7 +238,7 @@ public final WriteStream createWriteStream(CreateWriteStreamRequest request) { return createWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -200,12 +247,26 @@ public final WriteStream createWriteStream(CreateWriteStreamRequest request) { * received. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
+   *     .setParent(parent.toString())
+   *     .setWriteStream(writeStream)
+   *     .build();
+   *   ApiFuture<WriteStream> future = bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
+   *   // Do something
+   *   WriteStream response = future.get();
+   * }
+   * 
*/ public final UnaryCallable createWriteStreamCallable() { return stub.createWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Appends data to the given stream. * @@ -225,15 +286,40 @@ public final UnaryCallable createWriteStr * the stream is committed. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
+   *       bigQueryWriteClient.appendRowsCallable().call();
+   *
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   AppendRowsRequest request = AppendRowsRequest.newBuilder()
+   *     .setWriteStream(writeStream.toString())
+   *     .build();
+   *   bidiStream.send(request);
+   *   for (AppendRowsResponse response : bidiStream) {
+   *     // Do something when receive a response
+   *   }
+   * }
+   * 
*/ public final BidiStreamingCallable appendRowsCallable() { return stub.appendRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets a write stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(name);
+   * }
+   * 
+ * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -244,10 +330,19 @@ public final WriteStream getWriteStream(WriteStreamName name) { return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets a write stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(name.toString());
+   * }
+   * 
+ * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -257,10 +352,22 @@ public final WriteStream getWriteStream(String name) { return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets a write stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -268,21 +375,42 @@ public final WriteStream getWriteStream(GetWriteStreamRequest request) { return getWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Gets a write stream. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   ApiFuture<WriteStream> future = bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
+   *   // Do something
+   *   WriteStream response = future.get();
+   * }
+   * 
*/ public final UnaryCallable getWriteStreamCallable() { return stub.getWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
+   * }
+   * 
+ * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -295,11 +423,20 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName nam return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name.toString());
+   * }
+   * 
+ * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -310,11 +447,23 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(String name) { return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -322,24 +471,45 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStream return finalizeWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
+   *     .setName(name.toString())
+   *     .build();
+   *   ApiFuture<FinalizeWriteStreamResponse> future = bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
+   *   // Do something
+   *   FinalizeWriteStreamResponse response = future.get();
+   * }
+   * 
*/ public final UnaryCallable finalizeWriteStreamCallable() { return stub.finalizeWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String parent = "";
+   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent);
+   * }
+   * 
+ * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -350,12 +520,26 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(String pare return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String parent = "";
+   *   List<String> writeStreams = new ArrayList<>();
+   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
+   *     .setParent(parent)
+   *     .addAllWriteStreams(writeStreams)
+   *     .build();
+   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -364,27 +548,50 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams( return batchCommitWriteStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String parent = "";
+   *   List<String> writeStreams = new ArrayList<>();
+   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
+   *     .setParent(parent)
+   *     .addAllWriteStreams(writeStreams)
+   *     .build();
+   *   ApiFuture<BatchCommitWriteStreamsResponse> future = bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
+   *   // Do something
+   *   BatchCommitWriteStreamsResponse response = future.get();
+   * }
+   * 
*/ public final UnaryCallable batchCommitWriteStreamsCallable() { return stub.batchCommitWriteStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * - * @param write_stream Required. The stream that is the target of the flush operation. + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
+   * }
+   * 
+ * + * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final FlushRowsResponse flushRows(WriteStreamName writeStream) { @@ -395,14 +602,23 @@ public final FlushRowsResponse flushRows(WriteStreamName writeStream) { return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * - * @param write_stream Required. The stream that is the target of the flush operation. + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream.toString());
+   * }
+   * 
+ * + * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final FlushRowsResponse flushRows(String writeStream) { @@ -410,13 +626,25 @@ public final FlushRowsResponse flushRows(String writeStream) { return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * + *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
+   *     .setWriteStream(writeStream.toString())
+   *     .build();
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(request);
+   * }
+   * 
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -424,7 +652,7 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) { return flushRowsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD. + // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation @@ -432,6 +660,18 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) { * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * *

Sample code: + * + *


+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
+   *     .setWriteStream(writeStream.toString())
+   *     .build();
+   *   ApiFuture<FlushRowsResponse> future = bigQueryWriteClient.flushRowsCallable().futureCall(request);
+   *   // Do something
+   *   FlushRowsResponse response = future.get();
+   * }
+   * 
*/ public final UnaryCallable flushRowsCallable() { return stub.flushRowsCallable(); diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java index 3ec1da642e..154534dbdc 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.ApiFunction; @@ -32,7 +31,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BigQueryWriteClient}. * @@ -50,23 +49,23 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

{@code
- * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder = BigQueryWriteSettings.newBuilder();
+ * 
+ * 
+ * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder =
+ *     BigQueryWriteSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder
- *             .createWriteStreamSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * }
+ * + *
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi public class BigQueryWriteSettings extends ClientSettings { - /** Returns the object with the settings used for calls to createWriteStream. */ public UnaryCallSettings createWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).createWriteStreamSettings(); @@ -159,15 +158,18 @@ protected BigQueryWriteSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryWriteSettings. */ public static class Builder extends ClientSettings.Builder { - protected Builder() throws IOException { - this(((ClientContext) null)); + this((ClientContext) null); } protected Builder(ClientContext clientContext) { super(BigQueryWriteStubSettings.newBuilder(clientContext)); } + private static Builder createDefault() { + return new Builder(BigQueryWriteStubSettings.newBuilder()); + } + protected Builder(BigQueryWriteSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -176,15 +178,11 @@ protected Builder(BigQueryWriteStubSettings.Builder stubSettings) { super(stubSettings); } - private static Builder createDefault() { - return new Builder(BigQueryWriteStubSettings.newBuilder()); - } - public BigQueryWriteStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryWriteStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception'. + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java index da92a24b0c..b53d96890d 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,11 @@ */ /** - * The interfaces provided are listed below, along with usage samples. + * A client to BigQuery Storage API. * - *

======================= BigQueryReadClient ======================= + *

The interfaces provided are listed below, along with usage samples. + * + *

====================== BaseBigQueryReadClient ====================== * *

Service Description: BigQuery Read API. * @@ -26,17 +28,38 @@ *

New code should use the v1 Read API going forward, if they don't use Write API at the same * time. * - *

Sample for BigQueryReadClient: + *

Sample for BaseBigQueryReadClient: + * + *

+ * 
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   ReadSession readSession = ReadSession.newBuilder().build();
+ *   int maxStreamCount = 0;
+ *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+ * }
+ * 
+ * 
* - *

======================= BigQueryWriteClient ======================= + * =================== BigQueryWriteClient =================== * *

Service Description: BigQuery Write API. * *

The Write API can be used to write data to BigQuery. * *

Sample for BigQueryWriteClient: + * + *

+ * 
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ *   WriteStream writeStream = WriteStream.newBuilder().build();
+ *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ * 
+ * 
*/ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") package com.google.cloud.bigquery.storage.v1beta2; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java index ee9bfd6aba..116f00c54f 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -27,13 +27,14 @@ import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * Base stub class for the BigQueryRead service API. + * Base stub class for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryReadStub implements BackgroundResource { public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java index bb79df916a..1e9f940a35 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2.stub; import com.google.api.core.ApiFunction; @@ -47,7 +46,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BigQueryReadStub}. * @@ -65,28 +64,28 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

{@code
- * BigQueryReadStubSettings.Builder bigQueryReadSettingsBuilder =
+ * 
+ * 
+ * BigQueryReadStubSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BigQueryReadStubSettings.newBuilder();
- * bigQueryReadSettingsBuilder
+ * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         bigQueryReadSettingsBuilder
- *             .createReadSessionSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryReadStubSettings bigQueryReadSettings = bigQueryReadSettingsBuilder.build();
- * }
+ * BigQueryReadStubSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build(); + * + *
*/ +@Generated("by gapic-generator") @BetaApi -@Generated("by gapic-generator-java") public class BigQueryReadStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = ImmutableList.builder() .add("https://www.googleapis.com/auth/bigquery") + .add("https://www.googleapis.com/auth/bigquery.insertdata") .add("https://www.googleapis.com/auth/bigquery.readonly") .add("https://www.googleapis.com/auth/cloud-platform") .build(); @@ -118,10 +117,10 @@ public BigQueryReadStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryReadStub.create(this); + } else { + throw new UnsupportedOperationException( + "Transport not supported: " + getTransportChannelProvider().getTransportName()); } - throw new UnsupportedOperationException( - String.format( - "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -189,12 +188,14 @@ protected BigQueryReadStubSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryReadStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; + private final UnaryCallSettings.Builder createReadSessionSettings; private final ServerStreamingCallSettings.Builder readRowsSettings; private final UnaryCallSettings.Builder splitReadStreamSettings; + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -202,18 +203,36 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_0_codes", + "retry_policy_1_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + "retry_policy_4_codes", + ImmutableSet.copyOf( + Lists.newArrayList( + StatusCode.Code.DEADLINE_EXCEEDED, + StatusCode.Code.UNAVAILABLE, + StatusCode.Code.RESOURCE_EXHAUSTED))); definitions.put( - "retry_policy_2_codes", + "retry_policy_6_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); + definitions.put( + "retry_policy_3_codes", + ImmutableSet.copyOf( + Lists.newArrayList( + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_2_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_5_codes", + ImmutableSet.copyOf( + Lists.newArrayList( + StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -232,7 +251,29 @@ public static class Builder extends StubSettings.Builder>of( createReadSessionSettings, splitReadStreamSettings); - initDefaults(this); - } - protected Builder(BigQueryReadStubSettings settings) { - super(settings); - - createReadSessionSettings = 
settings.createReadSessionSettings.toBuilder(); - readRowsSettings = settings.readRowsSettings.toBuilder(); - splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createReadSessionSettings, splitReadStreamSettings); + initDefaults(this); } private static Builder createDefault() { - Builder builder = new Builder(((ClientContext) null)); - + Builder builder = new Builder((ClientContext) null); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); - return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder - .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder - .readRowsSettings() + .createReadSessionSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder - .splitReadStreamSettings() + .readRowsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + builder + .splitReadStreamSettings() + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + return builder; } - // NEXT_MAJOR_VER: remove 'throws Exception'. 
+ protected Builder(BigQueryReadStubSettings settings) { + super(settings); + + createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); + readRowsSettings = settings.readRowsSettings.toBuilder(); + splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createReadSessionSettings, splitReadStreamSettings); + } + + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java index cedc3d4d33..cc569eeb24 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -32,13 +32,14 @@ import com.google.cloud.bigquery.storage.v1beta2.WriteStream; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * Base stub class for the BigQueryWrite service API. + * Base stub class for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryWriteStub implements BackgroundResource { public UnaryCallable createWriteStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java index ebe08eda20..4e945907d9 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2.stub; import com.google.api.core.ApiFunction; @@ -52,7 +51,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link BigQueryWriteStub}. * @@ -70,29 +69,29 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

{@code
+ * 
+ * 
  * BigQueryWriteStubSettings.Builder bigQueryWriteSettingsBuilder =
  *     BigQueryWriteStubSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder
- *             .createWriteStreamSettings()
- *             .getRetrySettings()
- *             .toBuilder()
+ *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteStubSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * }
+ * + *
*/ +@Generated("by gapic-generator") @BetaApi -@Generated("by gapic-generator-java") public class BigQueryWriteStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = ImmutableList.builder() .add("https://www.googleapis.com/auth/bigquery") .add("https://www.googleapis.com/auth/bigquery.insertdata") + .add("https://www.googleapis.com/auth/bigquery.readonly") .add("https://www.googleapis.com/auth/cloud-platform") .build(); @@ -143,10 +142,10 @@ public BigQueryWriteStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryWriteStub.create(this); + } else { + throw new UnsupportedOperationException( + "Transport not supported: " + getTransportChannelProvider().getTransportName()); } - throw new UnsupportedOperationException( - String.format( - "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -217,6 +216,7 @@ protected BigQueryWriteStubSettings(Builder settingsBuilder) throws IOException /** Builder for BigQueryWriteStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; + private final UnaryCallSettings.Builder createWriteStreamSettings; private final StreamingCallSettings.Builder @@ -229,6 +229,7 @@ public static class Builder extends StubSettings.Builder batchCommitWriteStreamsSettings; private final UnaryCallSettings.Builder flushRowsSettings; + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -236,22 +237,36 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_3_codes", + "retry_policy_1_codes", + ImmutableSet.copyOf( + Lists.newArrayList( + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_4_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); definitions.put( - "retry_policy_4_codes", + "retry_policy_6_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_5_codes", + "retry_policy_3_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_2_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_5_codes", + ImmutableSet.copyOf( + Lists.newArrayList( + StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -260,6 +275,28 @@ public static class Builder extends StubSettings.Builder definitions = ImmutableMap.builder(); RetrySettings settings = null; + settings = + RetrySettings.newBuilder() + 
.setInitialRetryDelay(Duration.ofMillis(100L)) + .setRetryDelayMultiplier(1.3) + .setMaxRetryDelay(Duration.ofMillis(60000L)) + .setInitialRpcTimeout(Duration.ofMillis(600000L)) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ofMillis(600000L)) + .setTotalTimeout(Duration.ofMillis(600000L)) + .build(); + definitions.put("retry_policy_1_params", settings); + settings = + RetrySettings.newBuilder() + .setInitialRetryDelay(Duration.ofMillis(100L)) + .setRetryDelayMultiplier(1.3) + .setMaxRetryDelay(Duration.ofMillis(60000L)) + .setInitialRpcTimeout(Duration.ofMillis(600000L)) + .setRpcTimeoutMultiplier(1.0) + .setMaxRpcTimeout(Duration.ofMillis(600000L)) + .setTotalTimeout(Duration.ofMillis(600000L)) + .build(); + definitions.put("retry_policy_6_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.ofMillis(100L)) @@ -281,7 +318,7 @@ public static class Builder extends StubSettings.Builder>of( - createWriteStreamSettings, - getWriteStreamSettings, - finalizeWriteStreamSettings, - batchCommitWriteStreamsSettings, - flushRowsSettings); + initDefaults(this); } private static Builder createDefault() { - Builder builder = new Builder(((ClientContext) null)); - + Builder builder = new Builder((ClientContext) null); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); - return initDefaults(builder); } private static Builder initDefaults(Builder builder) { + builder .createWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_4_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_4_params")); builder 
.getWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); builder .finalizeWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); builder .batchCommitWriteStreamsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); builder .flushRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); return builder; } - // NEXT_MAJOR_VER: remove 'throws Exception'. 
+ protected Builder(BigQueryWriteStubSettings settings) { + super(settings); + + createWriteStreamSettings = settings.createWriteStreamSettings.toBuilder(); + appendRowsSettings = settings.appendRowsSettings.toBuilder(); + getWriteStreamSettings = settings.getWriteStreamSettings.toBuilder(); + finalizeWriteStreamSettings = settings.finalizeWriteStreamSettings.toBuilder(); + batchCommitWriteStreamsSettings = settings.batchCommitWriteStreamsSettings.toBuilder(); + flushRowsSettings = settings.flushRowsSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createWriteStreamSettings, + getWriteStreamSettings, + finalizeWriteStreamSettings, + batchCommitWriteStreamsSettings, + flushRowsSettings); + } + + // NEXT_MAJOR_VER: remove 'throws Exception' /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java index 85844b1ed9..a66a898f09 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,19 +31,18 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; -import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC callable factory implementation for the BigQueryRead service API. + * gRPC callable factory implementation for BigQuery Storage API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") +@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryReadCallableFactory implements GrpcStubCallableFactory { - @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -56,58 +55,61 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings callSettings, + PagedCallSettings pagedCallSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); + return GrpcCallableFactory.createPagedCallable( + grpcCallSettings, pagedCallSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings callSettings, + BatchingCallSettings batchingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, batchingCallSettings, clientContext); } + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings callSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings operationCallSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, callSettings, clientContext, operationsStub); + grpcCallSettings, operationCallSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings callSettings, + ServerStreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java index 158fe6200c..13f589f0af 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -31,7 +31,6 @@ import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest; import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse; import com.google.common.collect.ImmutableMap; -import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -39,14 +38,16 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC stub implementation for the BigQueryRead service API. + * gRPC stub implementation for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public class GrpcBigQueryReadStub extends BigQueryReadStub { + private static final MethodDescriptor createReadSessionMethodDescriptor = MethodDescriptor.newBuilder() @@ -57,7 +58,6 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) .build(); - private static final MethodDescriptor readRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -66,7 +66,6 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor splitReadStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -79,13 +78,13 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) .build(); + private final BackgroundResource backgroundResources; + private final UnaryCallable createReadSessionCallable; private final ServerStreamingCallable readRowsCallable; private final UnaryCallable splitReadStreamCallable; - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryReadStub create(BigQueryReadStubSettings settings) @@ -103,18 +102,27 @@ public static final GrpcBigQueryReadStub create( BigQueryReadStubSettings.newBuilder().build(), clientContext, callableFactory); } + /** + * Constructs an instance of GrpcBigQueryReadStub, using the given settings. 
This is protected so + * that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ protected GrpcBigQueryReadStub(BigQueryReadStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryReadCallableFactory()); } + /** + * Constructs an instance of GrpcBigQueryReadStub, using the given settings. This is protected so + * that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ protected GrpcBigQueryReadStub( BigQueryReadStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; - this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createReadSessionTransportSettings = GrpcCallSettings.newBuilder() @@ -170,12 +178,7 @@ public Map extract(SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - this.backgroundResources = - new BackgroundResourceAggregation(clientContext.getBackgroundResources()); - } - - public GrpcOperationsStub getOperationsStub() { - return operationsStub; + backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java index 0831c1c84e..985997ff97 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java +++ 
b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,19 +31,18 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; -import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC callable factory implementation for the BigQueryWrite service API. + * gRPC callable factory implementation for BigQuery Storage API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") +@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryWriteCallableFactory implements GrpcStubCallableFactory { - @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -56,58 +55,61 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings callSettings, + PagedCallSettings pagedCallSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); + return GrpcCallableFactory.createPagedCallable( + grpcCallSettings, pagedCallSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings callSettings, + BatchingCallSettings batchingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, batchingCallSettings, clientContext); } + @BetaApi( + "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings callSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings operationCallSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, callSettings, clientContext, operationsStub); + grpcCallSettings, operationCallSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings callSettings, + ServerStreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings callSettings, + StreamingCallSettings streamingCallSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, callSettings, clientContext); + grpcCallSettings, streamingCallSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java index c1fd66050f..262b7557f9 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta2.stub; +import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -36,7 +36,6 @@ import com.google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest; import com.google.cloud.bigquery.storage.v1beta2.WriteStream; import com.google.common.collect.ImmutableMap; -import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -44,14 +43,16 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. +// AUTO-GENERATED DOCUMENTATION AND CLASS /** - * gRPC stub implementation for the BigQueryWrite service API. + * gRPC stub implementation for BigQuery Storage API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator-java") +@Generated("by gapic-generator") +@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public class GrpcBigQueryWriteStub extends BigQueryWriteStub { + private static final MethodDescriptor createWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -62,7 +63,6 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { ProtoUtils.marshaller(CreateWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) .build(); - private static final MethodDescriptor appendRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -71,7 +71,6 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setRequestMarshaller(ProtoUtils.marshaller(AppendRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AppendRowsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor getWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -82,7 +81,6 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { ProtoUtils.marshaller(GetWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) .build(); - private static final MethodDescriptor finalizeWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -94,7 +92,6 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller( ProtoUtils.marshaller(FinalizeWriteStreamResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor< BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> batchCommitWriteStreamsMethodDescriptor = @@ -108,7 +105,6 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller( ProtoUtils.marshaller(BatchCommitWriteStreamsResponse.getDefaultInstance())) .build(); - 
private static final MethodDescriptor flushRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -118,6 +114,8 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller(ProtoUtils.marshaller(FlushRowsResponse.getDefaultInstance())) .build(); + private final BackgroundResource backgroundResources; + private final UnaryCallable createWriteStreamCallable; private final BidiStreamingCallable appendRowsCallable; private final UnaryCallable getWriteStreamCallable; @@ -127,8 +125,6 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { batchCommitWriteStreamsCallable; private final UnaryCallable flushRowsCallable; - private final BackgroundResource backgroundResources; - private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryWriteStub create(BigQueryWriteStubSettings settings) @@ -146,18 +142,27 @@ public static final GrpcBigQueryWriteStub create( BigQueryWriteStubSettings.newBuilder().build(), clientContext, callableFactory); } + /** + * Constructs an instance of GrpcBigQueryWriteStub, using the given settings. This is protected so + * that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. + */ protected GrpcBigQueryWriteStub(BigQueryWriteStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcBigQueryWriteCallableFactory()); } + /** + * Constructs an instance of GrpcBigQueryWriteStub, using the given settings. This is protected so + * that it is easy to make a subclass, but otherwise, the static factory methods should be + * preferred. 
+ */ protected GrpcBigQueryWriteStub( BigQueryWriteStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; - this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createWriteStreamTransportSettings = GrpcCallSettings.newBuilder() @@ -175,15 +180,6 @@ public Map extract(CreateWriteStreamRequest request) { GrpcCallSettings appendRowsTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(appendRowsMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(AppendRowsRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("write_stream", String.valueOf(request.getWriteStream())); - return params.build(); - } - }) .build(); GrpcCallSettings getWriteStreamTransportSettings = GrpcCallSettings.newBuilder() @@ -266,12 +262,7 @@ public Map extract(FlushRowsRequest request) { callableFactory.createUnaryCallable( flushRowsTransportSettings, settings.flushRowsSettings(), clientContext); - this.backgroundResources = - new BackgroundResourceAggregation(clientContext.getBackgroundResources()); - } - - public GrpcOperationsStub getOperationsStub() { - return operationsStub; + backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } public UnaryCallable createWriteStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java index 647e921610..1217dca250 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java @@ -5,7 +5,7 @@ * you may 
not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1; import com.google.api.gax.core.NoCredentialsProvider; @@ -27,15 +26,13 @@ import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import com.google.protobuf.Timestamp; +import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; -import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -43,31 +40,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@Generated("by gapic-generator-java") +@javax.annotation.Generated("by GAPIC") public class BaseBigQueryReadClientTest { private static MockBigQueryRead mockBigQueryRead; - private static MockServiceHelper mockServiceHelper; + private static MockServiceHelper serviceHelper; private BaseBigQueryReadClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryRead = new MockBigQueryRead(); - mockServiceHelper = + serviceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryRead)); - mockServiceHelper.start(); + serviceHelper.start(); } @AfterClass public static void stopServer() { - mockServiceHelper.stop(); + serviceHelper.stop(); } @Before public void setUp() throws IOException { - 
mockServiceHelper.reset(); - channelProvider = mockServiceHelper.createChannelProvider(); + serviceHelper.reset(); + channelProvider = serviceHelper.createChannelProvider(); BaseBigQueryReadSettings settings = BaseBigQueryReadSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -82,14 +79,12 @@ public void tearDown() throws Exception { } @Test - public void createReadSessionTest() throws Exception { + @SuppressWarnings("all") + public void createReadSessionTest() { + ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); + TableName table = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); ReadSession expectedResponse = - ReadSession.newBuilder() - .setName("name3373707") - .setExpireTime(Timestamp.newBuilder().build()) - .setTable("table110115790") - .addAllStreams(new ArrayList()) - .build(); + ReadSession.newBuilder().setName(name.toString()).setTable(table.toString()).build(); mockBigQueryRead.addResponse(expectedResponse); ProjectName parent = ProjectName.of("[PROJECT]"); @@ -101,9 +96,9 @@ public void createReadSessionTest() throws Exception { List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); + CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); - Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); Assert.assertEquals(readSession, actualRequest.getReadSession()); Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); Assert.assertTrue( @@ -113,83 +108,33 @@ public void createReadSessionTest() throws Exception { } @Test + @SuppressWarnings("all") public void createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + 
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { ProjectName parent = ProjectName.of("[PROJECT]"); ReadSession readSession = ReadSession.newBuilder().build(); int maxStreamCount = 940837515; - client.createReadSession(parent, readSession, maxStreamCount); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. - } - } - - @Test - public void createReadSessionTest2() throws Exception { - ReadSession expectedResponse = - ReadSession.newBuilder() - .setName("name3373707") - .setExpireTime(Timestamp.newBuilder().build()) - .setTable("table110115790") - .addAllStreams(new ArrayList()) - .build(); - mockBigQueryRead.addResponse(expectedResponse); - - String parent = "parent-995424086"; - ReadSession readSession = ReadSession.newBuilder().build(); - int maxStreamCount = 940837515; - - ReadSession actualResponse = client.createReadSession(parent, readSession, maxStreamCount); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryRead.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); - Assert.assertEquals(parent, actualRequest.getParent()); - Assert.assertEquals(readSession, actualRequest.getReadSession()); - Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void createReadSessionExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryRead.addException(exception); - - try { - String parent = "parent-995424086"; - ReadSession readSession = ReadSession.newBuilder().build(); - int maxStreamCount = 
940837515; client.createReadSession(parent, readSession, maxStreamCount); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } @Test + @SuppressWarnings("all") public void readRowsTest() throws Exception { - ReadRowsResponse expectedResponse = - ReadRowsResponse.newBuilder() - .setRowCount(1340416618) - .setStats(StreamStats.newBuilder().build()) - .setThrottleState(ThrottleState.newBuilder().build()) - .build(); + long rowCount = 1340416618L; + ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); mockBigQueryRead.addResponse(expectedResponse); + ReadStreamName readStream = + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder() - .setReadStream( - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) - .setOffset(-1019779949) - .build(); + ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -202,15 +147,14 @@ public void readRowsTest() throws Exception { } @Test + @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); + ReadStreamName readStream = + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder() - .setReadStream( - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) - .setOffset(-1019779949) - .build(); + ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -222,36 +166,29 @@ public void 
readRowsExceptionTest() throws Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - public void splitReadStreamTest() throws Exception { - SplitReadStreamResponse expectedResponse = - SplitReadStreamResponse.newBuilder() - .setPrimaryStream(ReadStream.newBuilder().build()) - .setRemainderStream(ReadStream.newBuilder().build()) - .build(); + @SuppressWarnings("all") + public void splitReadStreamTest() { + SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); mockBigQueryRead.addResponse(expectedResponse); + ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder() - .setName( - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) - .setFraction(-1653751294) - .build(); + SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); SplitReadStreamResponse actualResponse = client.splitReadStream(request); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SplitReadStreamRequest actualRequest = ((SplitReadStreamRequest) actualRequests.get(0)); + SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); - Assert.assertEquals(request.getName(), actualRequest.getName()); - Assert.assertEquals(request.getFraction(), actualRequest.getFraction(), 0.0001); + Assert.assertEquals(name, ReadStreamName.parse(actualRequest.getName())); Assert.assertTrue( channelProvider.isHeaderSent( 
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -259,21 +196,20 @@ public void splitReadStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { + ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder() - .setName( - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) - .setFraction(-1653751294) - .build(); + SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); + client.splitReadStream(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java index d4972d28a7..6c578b0d17 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -21,10 +20,9 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryRead implements MockGrpcService { private final MockBigQueryReadImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java index 21e64df693..b6e022ac6f 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -24,10 +23,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryReadImpl extends BigQueryReadImplBase { private List requests; private Queue responses; @@ -64,10 +62,10 @@ public void createReadSession( Object response = responses.remove(); if (response instanceof ReadSession) { requests.add(request); - responseObserver.onNext(((ReadSession) response)); + responseObserver.onNext((ReadSession) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -78,10 +76,10 @@ public void readRows(ReadRowsRequest request, StreamObserver r Object response = responses.remove(); if (response instanceof ReadRowsResponse) { requests.add(request); - responseObserver.onNext(((ReadRowsResponse) response)); + responseObserver.onNext((ReadRowsResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -93,10 +91,10 @@ public void splitReadStream( Object response = responses.remove(); if (response instanceof SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext(((SplitReadStreamResponse) response)); + responseObserver.onNext((SplitReadStreamResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } 
else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java index 9698758a03..f035c493f5 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.gax.core.NoCredentialsProvider; @@ -27,15 +26,25 @@ import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.protobuf.AbstractMessage; -import com.google.protobuf.Int64Value; +import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; -import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -43,31 +52,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@Generated("by gapic-generator-java") +@javax.annotation.Generated("by GAPIC") public class BigQueryWriteClientTest { - private static MockServiceHelper mockServiceHelper; + private static MockBigQueryWrite mockBigQueryWrite; + private static MockServiceHelper serviceHelper; private BigQueryWriteClient client; private 
LocalChannelProvider channelProvider; - private static MockBigQueryWrite mockBigQueryWrite; @BeforeClass public static void startStaticServer() { mockBigQueryWrite = new MockBigQueryWrite(); - mockServiceHelper = + serviceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryWrite)); - mockServiceHelper.start(); + serviceHelper.start(); } @AfterClass public static void stopServer() { - mockServiceHelper.stop(); + serviceHelper.stop(); } @Before public void setUp() throws IOException { - mockServiceHelper.reset(); - channelProvider = mockServiceHelper.createChannelProvider(); + serviceHelper.reset(); + channelProvider = serviceHelper.createChannelProvider(); BigQueryWriteSettings settings = BigQueryWriteSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -82,22 +91,25 @@ public void tearDown() throws Exception { } @Test - public void createWriteStreamTest() throws Exception { - Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); + @SuppressWarnings("all") + public void createWriteStreamTest() { + WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + String externalId = "externalId-1153075697"; + WriteStream expectedResponse = + WriteStream.newBuilder().setName(name.toString()).setExternalId(externalId).build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); + WriteStream writeStream = WriteStream.newBuilder().build(); - Stream.WriteStream actualResponse = client.createWriteStream(parent, writeStream); + WriteStream actualResponse = client.createWriteStream(parent, writeStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - Storage.CreateWriteStreamRequest actualRequest = - 
((Storage.CreateWriteStreamRequest) actualRequests.get(0)); + CreateWriteStreamRequest actualRequest = (CreateWriteStreamRequest) actualRequests.get(0); - Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); Assert.assertEquals(writeStream, actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -106,133 +118,96 @@ public void createWriteStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void createWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); - client.createWriteStream(parent, writeStream); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. 
- } - } + WriteStream writeStream = WriteStream.newBuilder().build(); - @Test - public void createWriteStreamTest2() throws Exception { - Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String parent = "parent-995424086"; - Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); - - Stream.WriteStream actualResponse = client.createWriteStream(parent, writeStream); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - Storage.CreateWriteStreamRequest actualRequest = - ((Storage.CreateWriteStreamRequest) actualRequests.get(0)); - - Assert.assertEquals(parent, actualRequest.getParent()); - Assert.assertEquals(writeStream, actualRequest.getWriteStream()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void createWriteStreamExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - - try { - String parent = "parent-995424086"; - Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); client.createWriteStream(parent, writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. 
+ // Expected exception } } @Test + @SuppressWarnings("all") public void appendRowsTest() throws Exception { - Storage.AppendRowsResponse expectedResponse = Storage.AppendRowsResponse.newBuilder().build(); + long offset = 1019779949L; + AppendRowsResponse expectedResponse = AppendRowsResponse.newBuilder().setOffset(offset).build(); mockBigQueryWrite.addResponse(expectedResponse); - Storage.AppendRowsRequest request = - Storage.AppendRowsRequest.newBuilder() - .setWriteStream( - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) - .setOffset(Int64Value.newBuilder().build()) - .setIgnoreUnknownFields(true) - .build(); + WriteStreamName writeStream = + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + AppendRowsRequest request = + AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - BidiStreamingCallable callable = + BidiStreamingCallable callable = client.appendRowsCallable(); - ApiStreamObserver requestObserver = + ApiStreamObserver requestObserver = callable.bidiStreamingCall(responseObserver); requestObserver.onNext(request); requestObserver.onCompleted(); - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.assertEquals(1, actualResponses.size()); Assert.assertEquals(expectedResponse, actualResponses.get(0)); } @Test + @SuppressWarnings("all") public void appendRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); - Storage.AppendRowsRequest request = - Storage.AppendRowsRequest.newBuilder() - .setWriteStream( - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", 
"[STREAM]").toString()) - .setOffset(Int64Value.newBuilder().build()) - .setIgnoreUnknownFields(true) - .build(); + WriteStreamName writeStream = + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + AppendRowsRequest request = + AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - BidiStreamingCallable callable = + BidiStreamingCallable callable = client.appendRowsCallable(); - ApiStreamObserver requestObserver = + ApiStreamObserver requestObserver = callable.bidiStreamingCall(responseObserver); requestObserver.onNext(request); try { - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - public void getWriteStreamTest() throws Exception { - Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); + @SuppressWarnings("all") + public void getWriteStreamTest() { + WriteStreamName name2 = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + String externalId = "externalId-1153075697"; + WriteStream expectedResponse = + WriteStream.newBuilder().setName(name2.toString()).setExternalId(externalId).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - Stream.WriteStream actualResponse = client.getWriteStream(name); + WriteStream actualResponse = client.getWriteStream(name); 
Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - Storage.GetWriteStreamRequest actualRequest = - ((Storage.GetWriteStreamRequest) actualRequests.get(0)); + GetWriteStreamRequest actualRequest = (GetWriteStreamRequest) actualRequests.get(0); - Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -240,72 +215,39 @@ public void getWriteStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void getWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - client.getWriteStream(name); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. 
- } - } - @Test - public void getWriteStreamTest2() throws Exception { - Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String name = "name3373707"; - - Stream.WriteStream actualResponse = client.getWriteStream(name); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - Storage.GetWriteStreamRequest actualRequest = - ((Storage.GetWriteStreamRequest) actualRequests.get(0)); - - Assert.assertEquals(name, actualRequest.getName()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void getWriteStreamExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - - try { - String name = "name3373707"; client.getWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. 
+ // Expected exception } } @Test - public void finalizeWriteStreamTest() throws Exception { - Storage.FinalizeWriteStreamResponse expectedResponse = - Storage.FinalizeWriteStreamResponse.newBuilder().build(); + @SuppressWarnings("all") + public void finalizeWriteStreamTest() { + long rowCount = 1340416618L; + FinalizeWriteStreamResponse expectedResponse = + FinalizeWriteStreamResponse.newBuilder().setRowCount(rowCount).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - Storage.FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); + FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - Storage.FinalizeWriteStreamRequest actualRequest = - ((Storage.FinalizeWriteStreamRequest) actualRequests.get(0)); + FinalizeWriteStreamRequest actualRequest = (FinalizeWriteStreamRequest) actualRequests.get(0); - Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -313,73 +255,39 @@ public void finalizeWriteStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void finalizeWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - client.finalizeWriteStream(name); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected 
exception. - } - } - - @Test - public void finalizeWriteStreamTest2() throws Exception { - Storage.FinalizeWriteStreamResponse expectedResponse = - Storage.FinalizeWriteStreamResponse.newBuilder().build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String name = "name3373707"; - Storage.FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - Storage.FinalizeWriteStreamRequest actualRequest = - ((Storage.FinalizeWriteStreamRequest) actualRequests.get(0)); - - Assert.assertEquals(name, actualRequest.getName()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void finalizeWriteStreamExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - - try { - String name = "name3373707"; client.finalizeWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. 
+ // Expected exception } } @Test - public void batchCommitWriteStreamsTest() throws Exception { - Storage.BatchCommitWriteStreamsResponse expectedResponse = - Storage.BatchCommitWriteStreamsResponse.newBuilder().build(); + @SuppressWarnings("all") + public void batchCommitWriteStreamsTest() { + BatchCommitWriteStreamsResponse expectedResponse = + BatchCommitWriteStreamsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - Storage.BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); + BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - Storage.BatchCommitWriteStreamsRequest actualRequest = - ((Storage.BatchCommitWriteStreamsRequest) actualRequests.get(0)); + BatchCommitWriteStreamsRequest actualRequest = + (BatchCommitWriteStreamsRequest) actualRequests.get(0); - Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -387,72 +295,39 @@ public void batchCommitWriteStreamsTest() throws Exception { } @Test + @SuppressWarnings("all") public void batchCommitWriteStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - client.batchCommitWriteStreams(parent); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. 
- } - } - - @Test - public void batchCommitWriteStreamsTest2() throws Exception { - Storage.BatchCommitWriteStreamsResponse expectedResponse = - Storage.BatchCommitWriteStreamsResponse.newBuilder().build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String parent = "parent-995424086"; - - Storage.BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - Storage.BatchCommitWriteStreamsRequest actualRequest = - ((Storage.BatchCommitWriteStreamsRequest) actualRequests.get(0)); - - Assert.assertEquals(parent, actualRequest.getParent()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - @Test - public void batchCommitWriteStreamsExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - - try { - String parent = "parent-995424086"; client.batchCommitWriteStreams(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. 
+ // Expected exception } } @Test - public void flushRowsTest() throws Exception { - Storage.FlushRowsResponse expectedResponse = Storage.FlushRowsResponse.newBuilder().build(); + @SuppressWarnings("all") + public void flushRowsTest() { + long offset = 1019779949L; + FlushRowsResponse expectedResponse = FlushRowsResponse.newBuilder().setOffset(offset).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - Storage.FlushRowsResponse actualResponse = client.flushRows(writeStream); + FlushRowsResponse actualResponse = client.flushRows(writeStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - Storage.FlushRowsRequest actualRequest = ((Storage.FlushRowsRequest) actualRequests.get(0)); + FlushRowsRequest actualRequest = (FlushRowsRequest) actualRequests.get(0); - Assert.assertEquals(writeStream.toString(), actualRequest.getWriteStream()); + Assert.assertEquals(writeStream, WriteStreamName.parse(actualRequest.getWriteStream())); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -460,52 +335,19 @@ public void flushRowsTest() throws Exception { } @Test + @SuppressWarnings("all") public void flushRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - client.flushRows(writeStream); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. 
- } - } - - @Test - public void flushRowsTest2() throws Exception { - Storage.FlushRowsResponse expectedResponse = Storage.FlushRowsResponse.newBuilder().build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String writeStream = "write_stream-1431753760"; - Storage.FlushRowsResponse actualResponse = client.flushRows(writeStream); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - Storage.FlushRowsRequest actualRequest = ((Storage.FlushRowsRequest) actualRequests.get(0)); - - Assert.assertEquals(writeStream, actualRequest.getWriteStream()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void flushRowsExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - - try { - String writeStream = "write_stream-1431753760"; client.flushRows(writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java index 14652dc6d0..543996d5e5 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; @@ -21,10 +20,9 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryWrite implements MockGrpcService { private final MockBigQueryWriteImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java index e63712321a..ecc8e99e05 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,21 +13,30 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; import com.google.cloud.bigquery.storage.v1alpha2.BigQueryWriteGrpc.BigQueryWriteImplBase; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; +import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; +import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.protobuf.AbstractMessage; import io.grpc.stub.StreamObserver; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Queue; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryWriteImpl extends BigQueryWriteImplBase { private List requests; private Queue responses; @@ -60,33 +69,32 @@ public void reset() { @Override public void createWriteStream( - Storage.CreateWriteStreamRequest request, - StreamObserver responseObserver) { + CreateWriteStreamRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Stream.WriteStream) { + if (response instanceof WriteStream) { requests.add(request); - responseObserver.onNext(((Stream.WriteStream) response)); + 
responseObserver.onNext((WriteStream) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } } @Override - public StreamObserver appendRows( - final StreamObserver responseObserver) { - StreamObserver requestObserver = - new StreamObserver() { + public StreamObserver appendRows( + final StreamObserver responseObserver) { + StreamObserver requestObserver = + new StreamObserver() { @Override - public void onNext(Storage.AppendRowsRequest value) { + public void onNext(AppendRowsRequest value) { requests.add(value); final Object response = responses.remove(); - if (response instanceof Storage.AppendRowsResponse) { - responseObserver.onNext(((Storage.AppendRowsResponse) response)); + if (response instanceof AppendRowsResponse) { + responseObserver.onNext((AppendRowsResponse) response); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -107,14 +115,14 @@ public void onCompleted() { @Override public void getWriteStream( - Storage.GetWriteStreamRequest request, StreamObserver responseObserver) { + GetWriteStreamRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Stream.WriteStream) { + if (response instanceof WriteStream) { requests.add(request); - responseObserver.onNext(((Stream.WriteStream) response)); + responseObserver.onNext((WriteStream) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new 
IllegalArgumentException("Unrecognized response type")); } @@ -122,15 +130,15 @@ public void getWriteStream( @Override public void finalizeWriteStream( - Storage.FinalizeWriteStreamRequest request, - StreamObserver responseObserver) { + FinalizeWriteStreamRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Storage.FinalizeWriteStreamResponse) { + if (response instanceof FinalizeWriteStreamResponse) { requests.add(request); - responseObserver.onNext(((Storage.FinalizeWriteStreamResponse) response)); + responseObserver.onNext((FinalizeWriteStreamResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -138,15 +146,15 @@ public void finalizeWriteStream( @Override public void batchCommitWriteStreams( - Storage.BatchCommitWriteStreamsRequest request, - StreamObserver responseObserver) { + BatchCommitWriteStreamsRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Storage.BatchCommitWriteStreamsResponse) { + if (response instanceof BatchCommitWriteStreamsResponse) { requests.add(request); - responseObserver.onNext(((Storage.BatchCommitWriteStreamsResponse) response)); + responseObserver.onNext((BatchCommitWriteStreamsResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -154,15 +162,14 @@ public void batchCommitWriteStreams( @Override public void flushRows( - Storage.FlushRowsRequest request, - StreamObserver responseObserver) { + FlushRowsRequest request, 
StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Storage.FlushRowsResponse) { + if (response instanceof FlushRowsResponse) { requests.add(request); - responseObserver.onNext(((Storage.FlushRowsResponse) response)); + responseObserver.onNext((FlushRowsResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java index f0663837ef..7d3c752e11 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.gax.core.NoCredentialsProvider; @@ -26,15 +25,27 @@ import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.Stream; +import com.google.cloud.bigquery.storage.v1beta1.Storage.StreamPosition; +import com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; +import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; -import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -42,31 +53,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@Generated("by gapic-generator-java") +@javax.annotation.Generated("by GAPIC") public class BaseBigQueryStorageClientTest { private static MockBigQueryStorage mockBigQueryStorage; - private static MockServiceHelper mockServiceHelper; + private static MockServiceHelper 
serviceHelper; private BaseBigQueryStorageClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryStorage = new MockBigQueryStorage(); - mockServiceHelper = + serviceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryStorage)); - mockServiceHelper.start(); + serviceHelper.start(); } @AfterClass public static void stopServer() { - mockServiceHelper.stop(); + serviceHelper.stop(); } @Before public void setUp() throws IOException { - mockServiceHelper.reset(); - channelProvider = mockServiceHelper.createChannelProvider(); + serviceHelper.reset(); + channelProvider = serviceHelper.createChannelProvider(); BaseBigQueryStorageSettings settings = BaseBigQueryStorageSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -81,26 +92,25 @@ public void tearDown() throws Exception { } @Test - public void createReadSessionTest() throws Exception { - Storage.ReadSession expectedResponse = Storage.ReadSession.newBuilder().build(); + @SuppressWarnings("all") + public void createReadSessionTest() { + ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); + ReadSession expectedResponse = ReadSession.newBuilder().setName(name.toString()).build(); mockBigQueryStorage.addResponse(expectedResponse); - TableReferenceProto.TableReference tableReference = - TableReferenceProto.TableReference.newBuilder().build(); + TableReference tableReference = TableReference.newBuilder().build(); ProjectName parent = ProjectName.of("[PROJECT]"); int requestedStreams = 1017221410; - Storage.ReadSession actualResponse = - client.createReadSession(tableReference, parent, requestedStreams); + ReadSession actualResponse = client.createReadSession(tableReference, parent, requestedStreams); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - 
Storage.CreateReadSessionRequest actualRequest = - ((Storage.CreateReadSessionRequest) actualRequests.get(0)); + CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); Assert.assertEquals(tableReference, actualRequest.getTableReference()); - Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -109,123 +119,83 @@ public void createReadSessionTest() throws Exception { } @Test + @SuppressWarnings("all") public void createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - TableReferenceProto.TableReference tableReference = - TableReferenceProto.TableReference.newBuilder().build(); + TableReference tableReference = TableReference.newBuilder().build(); ProjectName parent = ProjectName.of("[PROJECT]"); int requestedStreams = 1017221410; - client.createReadSession(tableReference, parent, requestedStreams); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. 
- } - } - - @Test - public void createReadSessionTest2() throws Exception { - Storage.ReadSession expectedResponse = Storage.ReadSession.newBuilder().build(); - mockBigQueryStorage.addResponse(expectedResponse); - - TableReferenceProto.TableReference tableReference = - TableReferenceProto.TableReference.newBuilder().build(); - String parent = "parent-995424086"; - int requestedStreams = 1017221410; - - Storage.ReadSession actualResponse = - client.createReadSession(tableReference, parent, requestedStreams); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryStorage.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - Storage.CreateReadSessionRequest actualRequest = - ((Storage.CreateReadSessionRequest) actualRequests.get(0)); - Assert.assertEquals(tableReference, actualRequest.getTableReference()); - Assert.assertEquals(parent, actualRequest.getParent()); - Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void createReadSessionExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryStorage.addException(exception); - - try { - TableReferenceProto.TableReference tableReference = - TableReferenceProto.TableReference.newBuilder().build(); - String parent = "parent-995424086"; - int requestedStreams = 1017221410; client.createReadSession(tableReference, parent, requestedStreams); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. 
+ // Expected exception } } @Test + @SuppressWarnings("all") public void readRowsTest() throws Exception { - Storage.ReadRowsResponse expectedResponse = Storage.ReadRowsResponse.newBuilder().build(); + long rowCount = 1340416618L; + ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); mockBigQueryStorage.addResponse(expectedResponse); - Storage.ReadRowsRequest request = Storage.ReadRowsRequest.newBuilder().build(); + StreamPosition readPosition = StreamPosition.newBuilder().build(); + ReadRowsRequest request = ReadRowsRequest.newBuilder().setReadPosition(readPosition).build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - ServerStreamingCallable callable = - client.readRowsCallable(); + ServerStreamingCallable callable = client.readRowsCallable(); callable.serverStreamingCall(request, responseObserver); - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.assertEquals(1, actualResponses.size()); Assert.assertEquals(expectedResponse, actualResponses.get(0)); } @Test + @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); - Storage.ReadRowsRequest request = Storage.ReadRowsRequest.newBuilder().build(); + StreamPosition readPosition = StreamPosition.newBuilder().build(); + ReadRowsRequest request = ReadRowsRequest.newBuilder().setReadPosition(readPosition).build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - ServerStreamingCallable callable = - client.readRowsCallable(); + ServerStreamingCallable callable = 
client.readRowsCallable(); callable.serverStreamingCall(request, responseObserver); try { - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - public void batchCreateReadSessionStreamsTest() throws Exception { - Storage.BatchCreateReadSessionStreamsResponse expectedResponse = - Storage.BatchCreateReadSessionStreamsResponse.newBuilder().build(); + @SuppressWarnings("all") + public void batchCreateReadSessionStreamsTest() { + BatchCreateReadSessionStreamsResponse expectedResponse = + BatchCreateReadSessionStreamsResponse.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - Storage.ReadSession session = Storage.ReadSession.newBuilder().build(); + ReadSession session = ReadSession.newBuilder().build(); int requestedStreams = 1017221410; - Storage.BatchCreateReadSessionStreamsResponse actualResponse = + BatchCreateReadSessionStreamsResponse actualResponse = client.batchCreateReadSessionStreams(session, requestedStreams); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - Storage.BatchCreateReadSessionStreamsRequest actualRequest = - ((Storage.BatchCreateReadSessionStreamsRequest) actualRequests.get(0)); + BatchCreateReadSessionStreamsRequest actualRequest = + (BatchCreateReadSessionStreamsRequest) actualRequests.get(0); Assert.assertEquals(session, actualRequest.getSession()); Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); @@ -236,33 +206,35 @@ 
public void batchCreateReadSessionStreamsTest() throws Exception { } @Test + @SuppressWarnings("all") public void batchCreateReadSessionStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - Storage.ReadSession session = Storage.ReadSession.newBuilder().build(); + ReadSession session = ReadSession.newBuilder().build(); int requestedStreams = 1017221410; + client.batchCreateReadSessionStreams(session, requestedStreams); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } @Test - public void finalizeStreamTest() throws Exception { + @SuppressWarnings("all") + public void finalizeStreamTest() { Empty expectedResponse = Empty.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - Storage.Stream stream = Storage.Stream.newBuilder().build(); + Stream stream = Stream.newBuilder().build(); client.finalizeStream(stream); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - Storage.FinalizeStreamRequest actualRequest = - ((Storage.FinalizeStreamRequest) actualRequests.get(0)); + FinalizeStreamRequest actualRequest = (FinalizeStreamRequest) actualRequests.get(0); Assert.assertEquals(stream, actualRequest.getStream()); Assert.assertTrue( @@ -272,34 +244,35 @@ public void finalizeStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void finalizeStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - Storage.Stream stream = Storage.Stream.newBuilder().build(); + 
Stream stream = Stream.newBuilder().build(); + client.finalizeStream(stream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } @Test - public void splitReadStreamTest() throws Exception { - Storage.SplitReadStreamResponse expectedResponse = - Storage.SplitReadStreamResponse.newBuilder().build(); + @SuppressWarnings("all") + public void splitReadStreamTest() { + SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - Storage.Stream originalStream = Storage.Stream.newBuilder().build(); + Stream originalStream = Stream.newBuilder().build(); - Storage.SplitReadStreamResponse actualResponse = client.splitReadStream(originalStream); + SplitReadStreamResponse actualResponse = client.splitReadStream(originalStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - Storage.SplitReadStreamRequest actualRequest = - ((Storage.SplitReadStreamRequest) actualRequests.get(0)); + SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); Assert.assertEquals(originalStream, actualRequest.getOriginalStream()); Assert.assertTrue( @@ -309,16 +282,18 @@ public void splitReadStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - Storage.Stream originalStream = Storage.Stream.newBuilder().build(); + Stream originalStream = Stream.newBuilder().build(); + client.splitReadStream(originalStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected 
exception. + // Expected exception } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java index 36e2257abe..6110c0f370 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; @@ -21,10 +20,9 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryStorage implements MockGrpcService { private final MockBigQueryStorageImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java index 79dc8f2ca2..41197eb3e1 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; import com.google.cloud.bigquery.storage.v1beta1.BigQueryStorageGrpc.BigQueryStorageImplBase; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; +import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; import io.grpc.stub.StreamObserver; @@ -25,10 +33,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryStorageImpl extends BigQueryStorageImplBase { private List requests; private Queue responses; @@ -61,30 +68,28 @@ public void reset() { @Override public void createReadSession( - Storage.CreateReadSessionRequest request, - StreamObserver responseObserver) { + CreateReadSessionRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Storage.ReadSession) { + if (response instanceof ReadSession) { requests.add(request); - responseObserver.onNext(((Storage.ReadSession) response)); + responseObserver.onNext((ReadSession) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) 
response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } } @Override - public void readRows( - Storage.ReadRowsRequest request, StreamObserver responseObserver) { + public void readRows(ReadRowsRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Storage.ReadRowsResponse) { + if (response instanceof ReadRowsResponse) { requests.add(request); - responseObserver.onNext(((Storage.ReadRowsResponse) response)); + responseObserver.onNext((ReadRowsResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -92,15 +97,15 @@ public void readRows( @Override public void batchCreateReadSessionStreams( - Storage.BatchCreateReadSessionStreamsRequest request, - StreamObserver responseObserver) { + BatchCreateReadSessionStreamsRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Storage.BatchCreateReadSessionStreamsResponse) { + if (response instanceof BatchCreateReadSessionStreamsResponse) { requests.add(request); - responseObserver.onNext(((Storage.BatchCreateReadSessionStreamsResponse) response)); + responseObserver.onNext((BatchCreateReadSessionStreamsResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -108,14 +113,14 @@ public void batchCreateReadSessionStreams( @Override public void finalizeStream( - Storage.FinalizeStreamRequest request, StreamObserver responseObserver) { 
+ FinalizeStreamRequest request, StreamObserver responseObserver) { Object response = responses.remove(); if (response instanceof Empty) { requests.add(request); - responseObserver.onNext(((Empty) response)); + responseObserver.onNext((Empty) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -123,15 +128,14 @@ public void finalizeStream( @Override public void splitReadStream( - Storage.SplitReadStreamRequest request, - StreamObserver responseObserver) { + SplitReadStreamRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof Storage.SplitReadStreamResponse) { + if (response instanceof SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext(((Storage.SplitReadStreamResponse) response)); + responseObserver.onNext((SplitReadStreamResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java index e144d138a7..24966efa77 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.gax.core.NoCredentialsProvider; @@ -27,15 +26,13 @@ import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import com.google.protobuf.Timestamp; +import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; -import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -43,31 +40,34 @@ import org.junit.BeforeClass; import org.junit.Test; -@Generated("by gapic-generator-java") +@javax.annotation.Generated("by GAPIC") public class BaseBigQueryReadClientTest { private static MockBigQueryRead mockBigQueryRead; - private static MockServiceHelper mockServiceHelper; + private static MockBigQueryWrite mockBigQueryWrite; + private static MockServiceHelper serviceHelper; private BaseBigQueryReadClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryRead = new MockBigQueryRead(); - mockServiceHelper = + mockBigQueryWrite = new MockBigQueryWrite(); + serviceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), Arrays.asList(mockBigQueryRead)); - mockServiceHelper.start(); + UUID.randomUUID().toString(), + Arrays.asList(mockBigQueryRead, mockBigQueryWrite)); + serviceHelper.start(); } @AfterClass public static void 
stopServer() { - mockServiceHelper.stop(); + serviceHelper.stop(); } @Before public void setUp() throws IOException { - mockServiceHelper.reset(); - channelProvider = mockServiceHelper.createChannelProvider(); + serviceHelper.reset(); + channelProvider = serviceHelper.createChannelProvider(); BaseBigQueryReadSettings settings = BaseBigQueryReadSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -82,14 +82,12 @@ public void tearDown() throws Exception { } @Test - public void createReadSessionTest() throws Exception { + @SuppressWarnings("all") + public void createReadSessionTest() { + ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); + TableName table = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); ReadSession expectedResponse = - ReadSession.newBuilder() - .setName("name3373707") - .setExpireTime(Timestamp.newBuilder().build()) - .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) - .addAllStreams(new ArrayList()) - .build(); + ReadSession.newBuilder().setName(name.toString()).setTable(table.toString()).build(); mockBigQueryRead.addResponse(expectedResponse); ProjectName parent = ProjectName.of("[PROJECT]"); @@ -101,9 +99,9 @@ public void createReadSessionTest() throws Exception { List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); + CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); - Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); Assert.assertEquals(readSession, actualRequest.getReadSession()); Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); Assert.assertTrue( @@ -113,83 +111,33 @@ public void createReadSessionTest() throws Exception { } @Test + @SuppressWarnings("all") public void 
createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { ProjectName parent = ProjectName.of("[PROJECT]"); ReadSession readSession = ReadSession.newBuilder().build(); int maxStreamCount = 940837515; - client.createReadSession(parent, readSession, maxStreamCount); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. - } - } - - @Test - public void createReadSessionTest2() throws Exception { - ReadSession expectedResponse = - ReadSession.newBuilder() - .setName("name3373707") - .setExpireTime(Timestamp.newBuilder().build()) - .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) - .addAllStreams(new ArrayList()) - .build(); - mockBigQueryRead.addResponse(expectedResponse); - - String parent = "parent-995424086"; - ReadSession readSession = ReadSession.newBuilder().build(); - int maxStreamCount = 940837515; - - ReadSession actualResponse = client.createReadSession(parent, readSession, maxStreamCount); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryRead.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); - Assert.assertEquals(parent, actualRequest.getParent()); - Assert.assertEquals(readSession, actualRequest.getReadSession()); - Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void createReadSessionExceptionTest2() throws Exception { - StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryRead.addException(exception); - - try { - String parent = "parent-995424086"; - ReadSession readSession = ReadSession.newBuilder().build(); - int maxStreamCount = 940837515; client.createReadSession(parent, readSession, maxStreamCount); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } @Test + @SuppressWarnings("all") public void readRowsTest() throws Exception { - ReadRowsResponse expectedResponse = - ReadRowsResponse.newBuilder() - .setRowCount(1340416618) - .setStats(StreamStats.newBuilder().build()) - .setThrottleState(ThrottleState.newBuilder().build()) - .build(); + long rowCount = 1340416618L; + ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); mockBigQueryRead.addResponse(expectedResponse); + ReadStreamName readStream = + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder() - .setReadStream( - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) - .setOffset(-1019779949) - .build(); + ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -202,15 +150,14 @@ public void readRowsTest() throws Exception { } @Test + @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); + ReadStreamName readStream = + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder() - .setReadStream( - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) - 
.setOffset(-1019779949) - .build(); + ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -222,36 +169,29 @@ public void readRowsExceptionTest() throws Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - public void splitReadStreamTest() throws Exception { - SplitReadStreamResponse expectedResponse = - SplitReadStreamResponse.newBuilder() - .setPrimaryStream(ReadStream.newBuilder().build()) - .setRemainderStream(ReadStream.newBuilder().build()) - .build(); + @SuppressWarnings("all") + public void splitReadStreamTest() { + SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); mockBigQueryRead.addResponse(expectedResponse); + ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder() - .setName( - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) - .setFraction(-1653751294) - .build(); + SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); SplitReadStreamResponse actualResponse = client.splitReadStream(request); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SplitReadStreamRequest actualRequest = ((SplitReadStreamRequest) actualRequests.get(0)); + SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); - Assert.assertEquals(request.getName(), actualRequest.getName()); - 
Assert.assertEquals(request.getFraction(), actualRequest.getFraction(), 0.0001); + Assert.assertEquals(name, ReadStreamName.parse(actualRequest.getName())); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -259,21 +199,20 @@ public void splitReadStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { + ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder() - .setName( - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) - .setFraction(-1653751294) - .build(); + SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); + client.splitReadStream(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java index bc30aeea74..974b7bc43e 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.gax.core.NoCredentialsProvider; @@ -28,16 +27,13 @@ import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import com.google.protobuf.Int64Value; -import com.google.protobuf.Timestamp; +import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; -import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -45,31 +41,34 @@ import org.junit.BeforeClass; import org.junit.Test; -@Generated("by gapic-generator-java") +@javax.annotation.Generated("by GAPIC") public class BigQueryWriteClientTest { - private static MockServiceHelper mockServiceHelper; + private static MockBigQueryRead mockBigQueryRead; + private static MockBigQueryWrite mockBigQueryWrite; + private static MockServiceHelper serviceHelper; private BigQueryWriteClient client; private LocalChannelProvider channelProvider; - private static MockBigQueryWrite mockBigQueryWrite; @BeforeClass public static void startStaticServer() { + mockBigQueryRead = new MockBigQueryRead(); mockBigQueryWrite = new MockBigQueryWrite(); - mockServiceHelper = + serviceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), Arrays.asList(mockBigQueryWrite)); - mockServiceHelper.start(); + UUID.randomUUID().toString(), + 
Arrays.asList(mockBigQueryRead, mockBigQueryWrite)); + serviceHelper.start(); } @AfterClass public static void stopServer() { - mockServiceHelper.stop(); + serviceHelper.stop(); } @Before public void setUp() throws IOException { - mockServiceHelper.reset(); - channelProvider = mockServiceHelper.createChannelProvider(); + serviceHelper.reset(); + channelProvider = serviceHelper.createChannelProvider(); BigQueryWriteSettings settings = BigQueryWriteSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -84,14 +83,10 @@ public void tearDown() throws Exception { } @Test - public void createWriteStreamTest() throws Exception { - WriteStream expectedResponse = - WriteStream.newBuilder() - .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) - .setCreateTime(Timestamp.newBuilder().build()) - .setCommitTime(Timestamp.newBuilder().build()) - .setTableSchema(TableSchema.newBuilder().build()) - .build(); + @SuppressWarnings("all") + public void createWriteStreamTest() { + WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + WriteStream expectedResponse = WriteStream.newBuilder().setName(name.toString()).build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); @@ -102,9 +97,9 @@ public void createWriteStreamTest() throws Exception { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateWriteStreamRequest actualRequest = ((CreateWriteStreamRequest) actualRequests.get(0)); + CreateWriteStreamRequest actualRequest = (CreateWriteStreamRequest) actualRequests.get(0); - Assert.assertEquals(parent.toString(), actualRequest.getParent()); + Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); Assert.assertEquals(writeStream, actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -113,76 +108,31 @@ public void 
createWriteStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void createWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); WriteStream writeStream = WriteStream.newBuilder().build(); - client.createWriteStream(parent, writeStream); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. - } - } - - @Test - public void createWriteStreamTest2() throws Exception { - WriteStream expectedResponse = - WriteStream.newBuilder() - .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) - .setCreateTime(Timestamp.newBuilder().build()) - .setCommitTime(Timestamp.newBuilder().build()) - .setTableSchema(TableSchema.newBuilder().build()) - .build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String parent = "parent-995424086"; - WriteStream writeStream = WriteStream.newBuilder().build(); - - WriteStream actualResponse = client.createWriteStream(parent, writeStream); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - CreateWriteStreamRequest actualRequest = ((CreateWriteStreamRequest) actualRequests.get(0)); - - Assert.assertEquals(parent, actualRequest.getParent()); - Assert.assertEquals(writeStream, actualRequest.getWriteStream()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - @Test - public void createWriteStreamExceptionTest2() throws Exception { - StatusRuntimeException exception = new 
StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - - try { - String parent = "parent-995424086"; - WriteStream writeStream = WriteStream.newBuilder().build(); client.createWriteStream(parent, writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } @Test + @SuppressWarnings("all") public void appendRowsTest() throws Exception { - AppendRowsResponse expectedResponse = - AppendRowsResponse.newBuilder().setUpdatedSchema(TableSchema.newBuilder().build()).build(); + AppendRowsResponse expectedResponse = AppendRowsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); + WriteStreamName writeStream = + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); AppendRowsRequest request = - AppendRowsRequest.newBuilder() - .setWriteStream( - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) - .setOffset(Int64Value.newBuilder().build()) - .setTraceId("trace_id1270300245") - .build(); + AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -200,16 +150,14 @@ public void appendRowsTest() throws Exception { } @Test + @SuppressWarnings("all") public void appendRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); + WriteStreamName writeStream = + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); AppendRowsRequest request = - AppendRowsRequest.newBuilder() - .setWriteStream( - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) - .setOffset(Int64Value.newBuilder().build()) - .setTraceId("trace_id1270300245") - .build(); + 
AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -225,20 +173,16 @@ public void appendRowsExceptionTest() throws Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); + InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - public void getWriteStreamTest() throws Exception { - WriteStream expectedResponse = - WriteStream.newBuilder() - .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) - .setCreateTime(Timestamp.newBuilder().build()) - .setCommitTime(Timestamp.newBuilder().build()) - .setTableSchema(TableSchema.newBuilder().build()) - .build(); + @SuppressWarnings("all") + public void getWriteStreamTest() { + WriteStreamName name2 = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + WriteStream expectedResponse = WriteStream.newBuilder().setName(name2.toString()).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); @@ -248,9 +192,9 @@ public void getWriteStreamTest() throws Exception { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetWriteStreamRequest actualRequest = ((GetWriteStreamRequest) actualRequests.get(0)); + GetWriteStreamRequest actualRequest = (GetWriteStreamRequest) actualRequests.get(0); - Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ 
-258,64 +202,27 @@ public void getWriteStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void getWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - client.getWriteStream(name); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. - } - } - - @Test - public void getWriteStreamTest2() throws Exception { - WriteStream expectedResponse = - WriteStream.newBuilder() - .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) - .setCreateTime(Timestamp.newBuilder().build()) - .setCommitTime(Timestamp.newBuilder().build()) - .setTableSchema(TableSchema.newBuilder().build()) - .build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String name = "name3373707"; - - WriteStream actualResponse = client.getWriteStream(name); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - GetWriteStreamRequest actualRequest = ((GetWriteStreamRequest) actualRequests.get(0)); - - Assert.assertEquals(name, actualRequest.getName()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - @Test - public void getWriteStreamExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - - try { - String name = "name3373707"; client.getWriteStream(name); Assert.fail("No exception raised"); } catch 
(InvalidArgumentException e) { - // Expected exception. + // Expected exception } } @Test - public void finalizeWriteStreamTest() throws Exception { + @SuppressWarnings("all") + public void finalizeWriteStreamTest() { + long rowCount = 1340416618L; FinalizeWriteStreamResponse expectedResponse = - FinalizeWriteStreamResponse.newBuilder().setRowCount(1340416618).build(); + FinalizeWriteStreamResponse.newBuilder().setRowCount(rowCount).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); @@ -325,9 +232,9 @@ public void finalizeWriteStreamTest() throws Exception { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FinalizeWriteStreamRequest actualRequest = ((FinalizeWriteStreamRequest) actualRequests.get(0)); + FinalizeWriteStreamRequest actualRequest = (FinalizeWriteStreamRequest) actualRequests.get(0); - Assert.assertEquals(name.toString(), actualRequest.getName()); + Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -335,62 +242,26 @@ public void finalizeWriteStreamTest() throws Exception { } @Test + @SuppressWarnings("all") public void finalizeWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - client.finalizeWriteStream(name); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. 
- } - } - - @Test - public void finalizeWriteStreamTest2() throws Exception { - FinalizeWriteStreamResponse expectedResponse = - FinalizeWriteStreamResponse.newBuilder().setRowCount(1340416618).build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String name = "name3373707"; - - FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - FinalizeWriteStreamRequest actualRequest = ((FinalizeWriteStreamRequest) actualRequests.get(0)); - - Assert.assertEquals(name, actualRequest.getName()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void finalizeWriteStreamExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - try { - String name = "name3373707"; client.finalizeWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. 
+ // Expected exception } } @Test - public void batchCommitWriteStreamsTest() throws Exception { + @SuppressWarnings("all") + public void batchCommitWriteStreamsTest() { BatchCommitWriteStreamsResponse expectedResponse = - BatchCommitWriteStreamsResponse.newBuilder() - .setCommitTime(Timestamp.newBuilder().build()) - .addAllStreamErrors(new ArrayList()) - .build(); + BatchCommitWriteStreamsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); String parent = "parent-995424086"; @@ -401,7 +272,7 @@ public void batchCommitWriteStreamsTest() throws Exception { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); BatchCommitWriteStreamsRequest actualRequest = - ((BatchCommitWriteStreamsRequest) actualRequests.get(0)); + (BatchCommitWriteStreamsRequest) actualRequests.get(0); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( @@ -411,23 +282,26 @@ public void batchCommitWriteStreamsTest() throws Exception { } @Test + @SuppressWarnings("all") public void batchCommitWriteStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { String parent = "parent-995424086"; + client.batchCommitWriteStreams(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. 
+ // Expected exception } } @Test - public void flushRowsTest() throws Exception { - FlushRowsResponse expectedResponse = - FlushRowsResponse.newBuilder().setOffset(-1019779949).build(); + @SuppressWarnings("all") + public void flushRowsTest() { + long offset = 1019779949L; + FlushRowsResponse expectedResponse = FlushRowsResponse.newBuilder().setOffset(offset).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName writeStream = @@ -438,9 +312,9 @@ public void flushRowsTest() throws Exception { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FlushRowsRequest actualRequest = ((FlushRowsRequest) actualRequests.get(0)); + FlushRowsRequest actualRequest = (FlushRowsRequest) actualRequests.get(0); - Assert.assertEquals(writeStream.toString(), actualRequest.getWriteStream()); + Assert.assertEquals(writeStream, WriteStreamName.parse(actualRequest.getWriteStream())); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -448,53 +322,19 @@ public void flushRowsTest() throws Exception { } @Test + @SuppressWarnings("all") public void flushRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - client.flushRows(writeStream); - Assert.fail("No exception raised"); - } catch (InvalidArgumentException e) { - // Expected exception. 
- } - } - - @Test - public void flushRowsTest2() throws Exception { - FlushRowsResponse expectedResponse = - FlushRowsResponse.newBuilder().setOffset(-1019779949).build(); - mockBigQueryWrite.addResponse(expectedResponse); - - String writeStream = "write_stream-1431753760"; - FlushRowsResponse actualResponse = client.flushRows(writeStream); - Assert.assertEquals(expectedResponse, actualResponse); - - List actualRequests = mockBigQueryWrite.getRequests(); - Assert.assertEquals(1, actualRequests.size()); - FlushRowsRequest actualRequest = ((FlushRowsRequest) actualRequests.get(0)); - - Assert.assertEquals(writeStream, actualRequest.getWriteStream()); - Assert.assertTrue( - channelProvider.isHeaderSent( - ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), - GaxGrpcProperties.getDefaultApiClientHeaderPattern())); - } - - @Test - public void flushRowsExceptionTest2() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); - mockBigQueryWrite.addException(exception); - - try { - String writeStream = "write_stream-1431753760"; client.flushRows(writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception. + // Expected exception } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java index cd82e240fb..26d09c0ef8 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -21,10 +20,9 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryRead implements MockGrpcService { private final MockBigQueryReadImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java index 1482baca9c..56d6b3d432 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -24,10 +23,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryReadImpl extends BigQueryReadImplBase { private List requests; private Queue responses; @@ -64,10 +62,10 @@ public void createReadSession( Object response = responses.remove(); if (response instanceof ReadSession) { requests.add(request); - responseObserver.onNext(((ReadSession) response)); + responseObserver.onNext((ReadSession) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -78,10 +76,10 @@ public void readRows(ReadRowsRequest request, StreamObserver r Object response = responses.remove(); if (response instanceof ReadRowsResponse) { requests.add(request); - responseObserver.onNext(((ReadRowsResponse) response)); + responseObserver.onNext((ReadRowsResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -93,10 +91,10 @@ public void splitReadStream( Object response = responses.remove(); if (response instanceof SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext(((SplitReadStreamResponse) response)); + responseObserver.onNext((SplitReadStreamResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); 
} else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java index 8adf63c1f2..ea99368e82 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -21,10 +20,9 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryWrite implements MockGrpcService { private final MockBigQueryWriteImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java index 078421f361..654a52574d 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -24,10 +23,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; -import javax.annotation.Generated; +@javax.annotation.Generated("by GAPIC") @BetaApi -@Generated("by gapic-generator-java") public class MockBigQueryWriteImpl extends BigQueryWriteImplBase { private List requests; private Queue responses; @@ -64,10 +62,10 @@ public void createWriteStream( Object response = responses.remove(); if (response instanceof WriteStream) { requests.add(request); - responseObserver.onNext(((WriteStream) response)); + responseObserver.onNext((WriteStream) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -83,9 +81,9 @@ public void onNext(AppendRowsRequest value) { requests.add(value); final Object response = responses.remove(); if (response instanceof AppendRowsResponse) { - responseObserver.onNext(((AppendRowsResponse) response)); + responseObserver.onNext((AppendRowsResponse) response); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -110,10 +108,10 @@ public void getWriteStream( Object response = responses.remove(); if (response instanceof WriteStream) { requests.add(request); - responseObserver.onNext(((WriteStream) response)); + responseObserver.onNext((WriteStream) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new 
IllegalArgumentException("Unrecognized response type")); } @@ -126,10 +124,10 @@ public void finalizeWriteStream( Object response = responses.remove(); if (response instanceof FinalizeWriteStreamResponse) { requests.add(request); - responseObserver.onNext(((FinalizeWriteStreamResponse) response)); + responseObserver.onNext((FinalizeWriteStreamResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -142,10 +140,10 @@ public void batchCommitWriteStreams( Object response = responses.remove(); if (response instanceof BatchCommitWriteStreamsResponse) { requests.add(request); - responseObserver.onNext(((BatchCommitWriteStreamsResponse) response)); + responseObserver.onNext((BatchCommitWriteStreamsResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -157,10 +155,10 @@ public void flushRows( Object response = responses.remove(); if (response instanceof FlushRowsResponse) { requests.add(request); - responseObserver.onNext(((FlushRowsResponse) response)); + responseObserver.onNext((FlushRowsResponse) response); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError(((Exception) response)); + responseObserver.onError((Exception) response); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java 
b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java index e138c838d1..0d8b2c2e12 100644 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,25 +23,17 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class ProjectName implements ResourceName { - private static final PathTemplate PROJECT = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; - @Deprecated - protected ProjectName() { - project = null; - } + private volatile Map fieldValuesMap; - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } + private final String project; public String getProject() { return project; @@ -55,6 +47,10 @@ public Builder toBuilder() { return new Builder(this); } + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -68,7 +64,7 @@ public static ProjectName parse(String formattedString) 
{ return null; } Map matchMap = - PROJECT.validatedMatch( + PATH_TEMPLATE.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -82,7 +78,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (ProjectName value : values) { if (value == null) { list.add(""); @@ -94,18 +90,15 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } + fieldMapBuilder.put("project", project); fieldValuesMap = fieldMapBuilder.build(); } } @@ -119,34 +112,13 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PROJECT.instantiate("project", project); - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || getClass() == o.getClass()) { - ProjectName that = ((ProjectName) o); - return Objects.equals(this.project, that.project); - } - return false; + return PATH_TEMPLATE.instantiate("project", project); } - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - return h; - } - - /** Builder for projects/{project}. */ + /** Builder for ProjectName. 
*/ public static class Builder { - private String project; - protected Builder() {} + private String project; public String getProject() { return project; @@ -157,6 +129,8 @@ public Builder setProject(String project) { return this; } + private Builder() {} + private Builder(ProjectName projectName) { project = projectName.project; } @@ -165,4 +139,24 @@ public ProjectName build() { return new ProjectName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ProjectName) { + ProjectName that = (ProjectName) o; + return (this.project.equals(that.project)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + return h; + } } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java new file mode 100644 index 0000000000..4aa5209ddd --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java @@ -0,0 +1,210 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.storage.v1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") +public class ReadSessionName implements ResourceName { + + private static final PathTemplate PATH_TEMPLATE = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/sessions/{session}"); + + private volatile Map fieldValuesMap; + + private final String project; + private final String location; + private final String session; + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + private ReadSessionName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + } + + public static ReadSessionName of(String project, String location, String session) { + return newBuilder().setProject(project).setLocation(location).setSession(session).build(); + } + + public static String format(String project, String location, String session) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setSession(session) + .build() + .toString(); + } + + public static ReadSessionName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PATH_TEMPLATE.validatedMatch( + formattedString, "ReadSessionName.parse: formattedString not in valid format"); + return 
of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList(values.size()); + for (ReadSessionName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PATH_TEMPLATE.matches(formattedString); + } + + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("location", location); + fieldMapBuilder.put("session", session); + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); + } + + /** Builder for ReadSessionName. 
*/ + public static class Builder { + + private String project; + private String location; + private String session; + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setSession(String session) { + this.session = session; + return this; + } + + private Builder() {} + + private Builder(ReadSessionName readSessionName) { + project = readSessionName.project; + location = readSessionName.location; + session = readSessionName.session; + } + + public ReadSessionName build() { + return new ReadSessionName(this); + } + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ReadSessionName) { + ReadSessionName that = (ReadSessionName) o; + return (this.project.equals(that.project)) + && (this.location.equals(that.location)) + && (this.session.equals(that.session)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= location.hashCode(); + h *= 1000003; + h ^= session.hashCode(); + return h; + } +} diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java index 8c68ce74b7..9c8236c663 100644 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,36 +23,22 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class ReadStreamName implements ResourceName { - private static final PathTemplate PROJECT_LOCATION_SESSION_STREAM = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding( "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}"); + private volatile Map fieldValuesMap; + private final String project; private final String location; private final String session; private final String stream; - @Deprecated - protected ReadStreamName() { - project = null; - location = null; - session = null; - stream = null; - } - - private ReadStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public String getProject() { return project; } @@ -77,6 +63,13 @@ public Builder toBuilder() { return new Builder(this); } + private ReadStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public static ReadStreamName of(String project, String location, String 
session, String stream) { return newBuilder() .setProject(project) @@ -101,7 +94,7 @@ public static ReadStreamName parse(String formattedString) { return null; } Map matchMap = - PROJECT_LOCATION_SESSION_STREAM.validatedMatch( + PATH_TEMPLATE.validatedMatch( formattedString, "ReadStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -119,7 +112,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (ReadStreamName value : values) { if (value == null) { list.add(""); @@ -131,27 +124,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT_LOCATION_SESSION_STREAM.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } - if (location != null) { - fieldMapBuilder.put("location", location); - } - if (session != null) { - fieldMapBuilder.put("session", session); - } - if (stream != null) { - fieldMapBuilder.put("stream", stream); - } + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("location", location); + fieldMapBuilder.put("session", session); + fieldMapBuilder.put("stream", stream); fieldValuesMap = fieldMapBuilder.build(); } } @@ -165,48 +149,18 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PROJECT_LOCATION_SESSION_STREAM.instantiate( + return PATH_TEMPLATE.instantiate( "project", project, "location", location, "session", session, "stream", stream); } - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || 
getClass() == o.getClass()) { - ReadStreamName that = ((ReadStreamName) o); - return Objects.equals(this.project, that.project) - && Objects.equals(this.location, that.location) - && Objects.equals(this.session, that.session) - && Objects.equals(this.stream, that.stream); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - h *= 1000003; - h ^= Objects.hashCode(location); - h *= 1000003; - h ^= Objects.hashCode(session); - h *= 1000003; - h ^= Objects.hashCode(stream); - return h; - } - - /** Builder for projects/{project}/locations/{location}/sessions/{session}/streams/{stream}. */ + /** Builder for ReadStreamName. */ public static class Builder { + private String project; private String location; private String session; private String stream; - protected Builder() {} - public String getProject() { return project; } @@ -243,6 +197,8 @@ public Builder setStream(String stream) { return this; } + private Builder() {} + private Builder(ReadStreamName readStreamName) { project = readStreamName.project; location = readStreamName.location; @@ -254,4 +210,33 @@ public ReadStreamName build() { return new ReadStreamName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ReadStreamName) { + ReadStreamName that = (ReadStreamName) o; + return (this.project.equals(that.project)) + && (this.location.equals(that.location)) + && (this.session.equals(that.session)) + && (this.stream.equals(that.stream)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= location.hashCode(); + h *= 1000003; + h ^= session.hashCode(); + h *= 1000003; + h ^= stream.hashCode(); + return h; + } } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java 
b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java new file mode 100644 index 0000000000..4478859799 --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java @@ -0,0 +1,204 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.storage.v1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") +public class TableName implements ResourceName { + + private static final PathTemplate PATH_TEMPLATE = + PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); + + private volatile Map fieldValuesMap; + + private final String project; + private final String dataset; + private final String table; + + public String getProject() { + return project; + } + + public String getDataset() { + return dataset; + } + + public String getTable() { + return table; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + private TableName(Builder 
builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + } + + public static TableName of(String project, String dataset, String table) { + return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); + } + + public static String format(String project, String dataset, String table) { + return newBuilder().setProject(project).setDataset(dataset).setTable(table).build().toString(); + } + + public static TableName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PATH_TEMPLATE.validatedMatch( + formattedString, "TableName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList(values.size()); + for (TableName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PATH_TEMPLATE.matches(formattedString); + } + + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("dataset", dataset); + fieldMapBuilder.put("table", table); + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String 
toString() { + return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); + } + + /** Builder for TableName. */ + public static class Builder { + + private String project; + private String dataset; + private String table; + + public String getProject() { + return project; + } + + public String getDataset() { + return dataset; + } + + public String getTable() { + return table; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setDataset(String dataset) { + this.dataset = dataset; + return this; + } + + public Builder setTable(String table) { + this.table = table; + return this; + } + + private Builder() {} + + private Builder(TableName tableName) { + project = tableName.project; + dataset = tableName.dataset; + table = tableName.table; + } + + public TableName build() { + return new TableName(this); + } + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof TableName) { + TableName that = (TableName) o; + return (this.project.equals(that.project)) + && (this.dataset.equals(that.dataset)) + && (this.table.equals(that.table)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= dataset.hashCode(); + h *= 1000003; + h ^= table.hashCode(); + return h; + } +} diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java index 2a1d43d92b..47ab519a3a 100644 --- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java +++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java @@ -5,7 +5,7 @@ * you may not use this file except in 
compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,32 +23,20 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class TableName implements ResourceName { - private static final PathTemplate PROJECT_DATASET_TABLE = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); + private volatile Map fieldValuesMap; + private final String project; private final String dataset; private final String table; - @Deprecated - protected TableName() { - project = null; - dataset = null; - table = null; - } - - private TableName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - } - public String getProject() { return project; } @@ -69,6 +57,12 @@ public Builder toBuilder() { return new Builder(this); } + private TableName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + } + public static TableName of(String project, String dataset, String table) { return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); } @@ -82,7 +76,7 @@ public static TableName parse(String formattedString) { return null; } Map matchMap = - 
PROJECT_DATASET_TABLE.validatedMatch( + PATH_TEMPLATE.validatedMatch( formattedString, "TableName.parse: formattedString not in valid format"); return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); } @@ -96,7 +90,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (TableName value : values) { if (value == null) { list.add(""); @@ -108,24 +102,17 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT_DATASET_TABLE.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } - if (dataset != null) { - fieldMapBuilder.put("dataset", dataset); - } - if (table != null) { - fieldMapBuilder.put("table", table); - } + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("dataset", dataset); + fieldMapBuilder.put("table", table); fieldValuesMap = fieldMapBuilder.build(); } } @@ -139,44 +126,16 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PROJECT_DATASET_TABLE.instantiate( - "project", project, "dataset", dataset, "table", table); - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || getClass() == o.getClass()) { - TableName that = ((TableName) o); - return Objects.equals(this.project, that.project) - && Objects.equals(this.dataset, that.dataset) - && Objects.equals(this.table, that.table); - } - return false; + return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); } - @Override - public int 
hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - h *= 1000003; - h ^= Objects.hashCode(dataset); - h *= 1000003; - h ^= Objects.hashCode(table); - return h; - } - - /** Builder for projects/{project}/datasets/{dataset}/tables/{table}. */ + /** Builder for TableName. */ public static class Builder { + private String project; private String dataset; private String table; - protected Builder() {} - public String getProject() { return project; } @@ -204,6 +163,8 @@ public Builder setTable(String table) { return this; } + private Builder() {} + private Builder(TableName tableName) { project = tableName.project; dataset = tableName.dataset; @@ -214,4 +175,30 @@ public TableName build() { return new TableName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof TableName) { + TableName that = (TableName) o; + return (this.project.equals(that.project)) + && (this.dataset.equals(that.dataset)) + && (this.table.equals(that.table)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= dataset.hashCode(); + h *= 1000003; + h ^= table.hashCode(); + return h; + } } diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java index 7b2430f06a..dbc4bd48a7 100644 --- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,36 +23,22 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class WriteStreamName implements ResourceName { - private static final PathTemplate PROJECT_DATASET_TABLE_STREAM = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding( "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"); + private volatile Map fieldValuesMap; + private final String project; private final String dataset; private final String table; private final String stream; - @Deprecated - protected WriteStreamName() { - project = null; - dataset = null; - table = null; - stream = null; - } - - private WriteStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public String getProject() { return project; } @@ -77,6 +63,13 @@ public Builder toBuilder() { return new Builder(this); } + private WriteStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public static WriteStreamName of(String project, String dataset, String table, String stream) { 
return newBuilder() .setProject(project) @@ -101,7 +94,7 @@ public static WriteStreamName parse(String formattedString) { return null; } Map matchMap = - PROJECT_DATASET_TABLE_STREAM.validatedMatch( + PATH_TEMPLATE.validatedMatch( formattedString, "WriteStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -119,7 +112,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (WriteStreamName value : values) { if (value == null) { list.add(""); @@ -131,27 +124,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT_DATASET_TABLE_STREAM.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } - if (dataset != null) { - fieldMapBuilder.put("dataset", dataset); - } - if (table != null) { - fieldMapBuilder.put("table", table); - } - if (stream != null) { - fieldMapBuilder.put("stream", stream); - } + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("dataset", dataset); + fieldMapBuilder.put("table", table); + fieldMapBuilder.put("stream", stream); fieldValuesMap = fieldMapBuilder.build(); } } @@ -165,48 +149,18 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PROJECT_DATASET_TABLE_STREAM.instantiate( + return PATH_TEMPLATE.instantiate( "project", project, "dataset", dataset, "table", table, "stream", stream); } - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || getClass() == o.getClass()) { - WriteStreamName that = 
((WriteStreamName) o); - return Objects.equals(this.project, that.project) - && Objects.equals(this.dataset, that.dataset) - && Objects.equals(this.table, that.table) - && Objects.equals(this.stream, that.stream); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - h *= 1000003; - h ^= Objects.hashCode(dataset); - h *= 1000003; - h ^= Objects.hashCode(table); - h *= 1000003; - h ^= Objects.hashCode(stream); - return h; - } - - /** Builder for projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}. */ + /** Builder for WriteStreamName. */ public static class Builder { + private String project; private String dataset; private String table; private String stream; - protected Builder() {} - public String getProject() { return project; } @@ -243,6 +197,8 @@ public Builder setStream(String stream) { return this; } + private Builder() {} + private Builder(WriteStreamName writeStreamName) { project = writeStreamName.project; dataset = writeStreamName.dataset; @@ -254,4 +210,33 @@ public WriteStreamName build() { return new WriteStreamName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof WriteStreamName) { + WriteStreamName that = (WriteStreamName) o; + return (this.project.equals(that.project)) + && (this.dataset.equals(that.dataset)) + && (this.table.equals(that.table)) + && (this.stream.equals(that.stream)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= dataset.hashCode(); + h *= 1000003; + h ^= table.hashCode(); + h *= 1000003; + h ^= stream.hashCode(); + return h; + } } diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java 
index 61336b2620..3ffe5f3360 100644 --- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,25 +23,17 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class ProjectName implements ResourceName { - private static final PathTemplate PROJECT = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; - @Deprecated - protected ProjectName() { - project = null; - } + private volatile Map fieldValuesMap; - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } + private final String project; public String getProject() { return project; @@ -55,6 +47,10 @@ public Builder toBuilder() { return new Builder(this); } + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -68,7 +64,7 @@ public static ProjectName parse(String formattedString) { return null; } Map matchMap = - PROJECT.validatedMatch( + 
PATH_TEMPLATE.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -82,7 +78,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (ProjectName value : values) { if (value == null) { list.add(""); @@ -94,18 +90,15 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } + fieldMapBuilder.put("project", project); fieldValuesMap = fieldMapBuilder.build(); } } @@ -119,34 +112,13 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PROJECT.instantiate("project", project); - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || getClass() == o.getClass()) { - ProjectName that = ((ProjectName) o); - return Objects.equals(this.project, that.project); - } - return false; + return PATH_TEMPLATE.instantiate("project", project); } - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - return h; - } - - /** Builder for projects/{project}. */ + /** Builder for ProjectName. 
*/ public static class Builder { - private String project; - protected Builder() {} + private String project; public String getProject() { return project; @@ -157,6 +129,8 @@ public Builder setProject(String project) { return this; } + private Builder() {} + private Builder(ProjectName projectName) { project = projectName.project; } @@ -165,4 +139,24 @@ public ProjectName build() { return new ProjectName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ProjectName) { + ProjectName that = (ProjectName) o; + return (this.project.equals(that.project)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + return h; + } } diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java new file mode 100644 index 0000000000..c0762f78e5 --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java @@ -0,0 +1,210 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.storage.v1beta1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") +public class ReadSessionName implements ResourceName { + + private static final PathTemplate PATH_TEMPLATE = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/sessions/{session}"); + + private volatile Map fieldValuesMap; + + private final String project; + private final String location; + private final String session; + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + private ReadSessionName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + } + + public static ReadSessionName of(String project, String location, String session) { + return newBuilder().setProject(project).setLocation(location).setSession(session).build(); + } + + public static String format(String project, String location, String session) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setSession(session) + .build() + .toString(); + } + + public static ReadSessionName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PATH_TEMPLATE.validatedMatch( + formattedString, "ReadSessionName.parse: formattedString not in valid format"); + return 
of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList(values.size()); + for (ReadSessionName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PATH_TEMPLATE.matches(formattedString); + } + + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("location", location); + fieldMapBuilder.put("session", session); + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); + } + + /** Builder for ReadSessionName. 
*/ + public static class Builder { + + private String project; + private String location; + private String session; + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setSession(String session) { + this.session = session; + return this; + } + + private Builder() {} + + private Builder(ReadSessionName readSessionName) { + project = readSessionName.project; + location = readSessionName.location; + session = readSessionName.session; + } + + public ReadSessionName build() { + return new ReadSessionName(this); + } + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ReadSessionName) { + ReadSessionName that = (ReadSessionName) o; + return (this.project.equals(that.project)) + && (this.location.equals(that.location)) + && (this.session.equals(that.session)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= location.hashCode(); + h *= 1000003; + h ^= session.hashCode(); + return h; + } +} diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java new file mode 100644 index 0000000000..a486d4fc85 --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java @@ -0,0 +1,210 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.storage.v1beta1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") +public class StreamName implements ResourceName { + + private static final PathTemplate PATH_TEMPLATE = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/streams/{stream}"); + + private volatile Map fieldValuesMap; + + private final String project; + private final String location; + private final String stream; + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getStream() { + return stream; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + private StreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + + public static StreamName of(String project, String location, String stream) { + return newBuilder().setProject(project).setLocation(location).setStream(stream).build(); + } + + public static String format(String project, String location, String stream) { 
+ return newBuilder() + .setProject(project) + .setLocation(location) + .setStream(stream) + .build() + .toString(); + } + + public static StreamName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PATH_TEMPLATE.validatedMatch( + formattedString, "StreamName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("stream")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList(values.size()); + for (StreamName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PATH_TEMPLATE.matches(formattedString); + } + + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("location", location); + fieldMapBuilder.put("stream", stream); + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PATH_TEMPLATE.instantiate("project", project, "location", location, "stream", stream); + } + + /** Builder for StreamName. 
*/ + public static class Builder { + + private String project; + private String location; + private String stream; + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getStream() { + return stream; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setStream(String stream) { + this.stream = stream; + return this; + } + + private Builder() {} + + private Builder(StreamName streamName) { + project = streamName.project; + location = streamName.location; + stream = streamName.stream; + } + + public StreamName build() { + return new StreamName(this); + } + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof StreamName) { + StreamName that = (StreamName) o; + return (this.project.equals(that.project)) + && (this.location.equals(that.location)) + && (this.stream.equals(that.stream)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= location.hashCode(); + h *= 1000003; + h ^= stream.hashCode(); + return h; + } +} diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java index e6400237ee..3ca1b0df14 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,25 +23,17 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class ProjectName implements ResourceName { - private static final PathTemplate PROJECT = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; - @Deprecated - protected ProjectName() { - project = null; - } + private volatile Map fieldValuesMap; - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } + private final String project; public String getProject() { return project; @@ -55,6 +47,10 @@ public Builder toBuilder() { return new Builder(this); } + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -68,7 +64,7 @@ public static ProjectName parse(String formattedString) { return null; } Map matchMap = - PROJECT.validatedMatch( + PATH_TEMPLATE.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -82,7 +78,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (ProjectName value : values) { 
if (value == null) { list.add(""); @@ -94,18 +90,15 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } + fieldMapBuilder.put("project", project); fieldValuesMap = fieldMapBuilder.build(); } } @@ -119,34 +112,13 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PROJECT.instantiate("project", project); - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || getClass() == o.getClass()) { - ProjectName that = ((ProjectName) o); - return Objects.equals(this.project, that.project); - } - return false; + return PATH_TEMPLATE.instantiate("project", project); } - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - return h; - } - - /** Builder for projects/{project}. */ + /** Builder for ProjectName. 
*/ public static class Builder { - private String project; - protected Builder() {} + private String project; public String getProject() { return project; @@ -157,6 +129,8 @@ public Builder setProject(String project) { return this; } + private Builder() {} + private Builder(ProjectName projectName) { project = projectName.project; } @@ -165,4 +139,24 @@ public ProjectName build() { return new ProjectName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ProjectName) { + ProjectName that = (ProjectName) o; + return (this.project.equals(that.project)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + return h; + } } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java new file mode 100644 index 0000000000..46c8731d94 --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java @@ -0,0 +1,210 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.storage.v1beta2; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") +public class ReadSessionName implements ResourceName { + + private static final PathTemplate PATH_TEMPLATE = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/sessions/{session}"); + + private volatile Map fieldValuesMap; + + private final String project; + private final String location; + private final String session; + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + private ReadSessionName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + } + + public static ReadSessionName of(String project, String location, String session) { + return newBuilder().setProject(project).setLocation(location).setSession(session).build(); + } + + public static String format(String project, String location, String session) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setSession(session) + .build() + .toString(); + } + + public static ReadSessionName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PATH_TEMPLATE.validatedMatch( + formattedString, "ReadSessionName.parse: formattedString not in valid format"); + return 
of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList(values.size()); + for (ReadSessionName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PATH_TEMPLATE.matches(formattedString); + } + + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("location", location); + fieldMapBuilder.put("session", session); + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); + } + + /** Builder for ReadSessionName. 
*/ + public static class Builder { + + private String project; + private String location; + private String session; + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setSession(String session) { + this.session = session; + return this; + } + + private Builder() {} + + private Builder(ReadSessionName readSessionName) { + project = readSessionName.project; + location = readSessionName.location; + session = readSessionName.session; + } + + public ReadSessionName build() { + return new ReadSessionName(this); + } + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ReadSessionName) { + ReadSessionName that = (ReadSessionName) o; + return (this.project.equals(that.project)) + && (this.location.equals(that.location)) + && (this.session.equals(that.session)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= location.hashCode(); + h *= 1000003; + h ^= session.hashCode(); + return h; + } +} diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java index eab784f8ac..cd616ebba7 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,36 +23,22 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class ReadStreamName implements ResourceName { - private static final PathTemplate PROJECT_LOCATION_SESSION_STREAM = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding( "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}"); + private volatile Map fieldValuesMap; + private final String project; private final String location; private final String session; private final String stream; - @Deprecated - protected ReadStreamName() { - project = null; - location = null; - session = null; - stream = null; - } - - private ReadStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public String getProject() { return project; } @@ -77,6 +63,13 @@ public Builder toBuilder() { return new Builder(this); } + private ReadStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public static ReadStreamName of(String project, String location, String 
session, String stream) { return newBuilder() .setProject(project) @@ -101,7 +94,7 @@ public static ReadStreamName parse(String formattedString) { return null; } Map matchMap = - PROJECT_LOCATION_SESSION_STREAM.validatedMatch( + PATH_TEMPLATE.validatedMatch( formattedString, "ReadStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -119,7 +112,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (ReadStreamName value : values) { if (value == null) { list.add(""); @@ -131,27 +124,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT_LOCATION_SESSION_STREAM.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } - if (location != null) { - fieldMapBuilder.put("location", location); - } - if (session != null) { - fieldMapBuilder.put("session", session); - } - if (stream != null) { - fieldMapBuilder.put("stream", stream); - } + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("location", location); + fieldMapBuilder.put("session", session); + fieldMapBuilder.put("stream", stream); fieldValuesMap = fieldMapBuilder.build(); } } @@ -165,48 +149,18 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PROJECT_LOCATION_SESSION_STREAM.instantiate( + return PATH_TEMPLATE.instantiate( "project", project, "location", location, "session", session, "stream", stream); } - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || 
getClass() == o.getClass()) { - ReadStreamName that = ((ReadStreamName) o); - return Objects.equals(this.project, that.project) - && Objects.equals(this.location, that.location) - && Objects.equals(this.session, that.session) - && Objects.equals(this.stream, that.stream); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - h *= 1000003; - h ^= Objects.hashCode(location); - h *= 1000003; - h ^= Objects.hashCode(session); - h *= 1000003; - h ^= Objects.hashCode(stream); - return h; - } - - /** Builder for projects/{project}/locations/{location}/sessions/{session}/streams/{stream}. */ + /** Builder for ReadStreamName. */ public static class Builder { + private String project; private String location; private String session; private String stream; - protected Builder() {} - public String getProject() { return project; } @@ -243,6 +197,8 @@ public Builder setStream(String stream) { return this; } + private Builder() {} + private Builder(ReadStreamName readStreamName) { project = readStreamName.project; location = readStreamName.location; @@ -254,4 +210,33 @@ public ReadStreamName build() { return new ReadStreamName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof ReadStreamName) { + ReadStreamName that = (ReadStreamName) o; + return (this.project.equals(that.project)) + && (this.location.equals(that.location)) + && (this.session.equals(that.session)) + && (this.stream.equals(that.stream)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= location.hashCode(); + h *= 1000003; + h ^= session.hashCode(); + h *= 1000003; + h ^= stream.hashCode(); + return h; + } } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java 
b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java index a90e8b3d16..476d710941 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,32 +23,20 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class TableName implements ResourceName { - private static final PathTemplate PROJECT_DATASET_TABLE = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); + private volatile Map fieldValuesMap; + private final String project; private final String dataset; private final String table; - @Deprecated - protected TableName() { - project = null; - dataset = null; - table = null; - } - - private TableName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - } - public String getProject() { return project; } @@ -69,6 +57,12 @@ public Builder toBuilder() { return new Builder(this); } + private TableName(Builder builder) { + project = 
Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + } + public static TableName of(String project, String dataset, String table) { return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); } @@ -82,7 +76,7 @@ public static TableName parse(String formattedString) { return null; } Map matchMap = - PROJECT_DATASET_TABLE.validatedMatch( + PATH_TEMPLATE.validatedMatch( formattedString, "TableName.parse: formattedString not in valid format"); return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); } @@ -96,7 +90,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (TableName value : values) { if (value == null) { list.add(""); @@ -108,24 +102,17 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT_DATASET_TABLE.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } - if (dataset != null) { - fieldMapBuilder.put("dataset", dataset); - } - if (table != null) { - fieldMapBuilder.put("table", table); - } + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("dataset", dataset); + fieldMapBuilder.put("table", table); fieldValuesMap = fieldMapBuilder.build(); } } @@ -139,44 +126,16 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PROJECT_DATASET_TABLE.instantiate( - "project", project, "dataset", dataset, "table", table); - } - - @Override - 
public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || getClass() == o.getClass()) { - TableName that = ((TableName) o); - return Objects.equals(this.project, that.project) - && Objects.equals(this.dataset, that.dataset) - && Objects.equals(this.table, that.table); - } - return false; + return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); } - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - h *= 1000003; - h ^= Objects.hashCode(dataset); - h *= 1000003; - h ^= Objects.hashCode(table); - return h; - } - - /** Builder for projects/{project}/datasets/{dataset}/tables/{table}. */ + /** Builder for TableName. */ public static class Builder { + private String project; private String dataset; private String table; - protected Builder() {} - public String getProject() { return project; } @@ -204,6 +163,8 @@ public Builder setTable(String table) { return this; } + private Builder() {} + private Builder(TableName tableName) { project = tableName.project; dataset = tableName.dataset; @@ -214,4 +175,30 @@ public TableName build() { return new TableName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof TableName) { + TableName that = (TableName) o; + return (this.project.equals(that.project)) + && (this.dataset.equals(that.dataset)) + && (this.table.equals(that.table)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= dataset.hashCode(); + h *= 1000003; + h ^= table.hashCode(); + return h; + } } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java index b3392aeadc..ba877cc86b 100644 --- 
a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,36 +23,22 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; -import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS. -@Generated("by gapic-generator-java") +/** AUTO-GENERATED DOCUMENTATION AND CLASS */ +@javax.annotation.Generated("by GAPIC protoc plugin") public class WriteStreamName implements ResourceName { - private static final PathTemplate PROJECT_DATASET_TABLE_STREAM = + + private static final PathTemplate PATH_TEMPLATE = PathTemplate.createWithoutUrlEncoding( "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"); + private volatile Map fieldValuesMap; + private final String project; private final String dataset; private final String table; private final String stream; - @Deprecated - protected WriteStreamName() { - project = null; - dataset = null; - table = null; - stream = null; - } - - private WriteStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public String getProject() { return project; } @@ -77,6 +63,13 @@ public Builder toBuilder() { return new Builder(this); } + private WriteStreamName(Builder builder) { + project = 
Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public static WriteStreamName of(String project, String dataset, String table, String stream) { return newBuilder() .setProject(project) @@ -101,7 +94,7 @@ public static WriteStreamName parse(String formattedString) { return null; } Map matchMap = - PROJECT_DATASET_TABLE_STREAM.validatedMatch( + PATH_TEMPLATE.validatedMatch( formattedString, "WriteStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -119,7 +112,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList<>(values.size()); + List list = new ArrayList(values.size()); for (WriteStreamName value : values) { if (value == null) { list.add(""); @@ -131,27 +124,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PROJECT_DATASET_TABLE_STREAM.matches(formattedString); + return PATH_TEMPLATE.matches(formattedString); } - @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - if (project != null) { - fieldMapBuilder.put("project", project); - } - if (dataset != null) { - fieldMapBuilder.put("dataset", dataset); - } - if (table != null) { - fieldMapBuilder.put("table", table); - } - if (stream != null) { - fieldMapBuilder.put("stream", stream); - } + fieldMapBuilder.put("project", project); + fieldMapBuilder.put("dataset", dataset); + fieldMapBuilder.put("table", table); + fieldMapBuilder.put("stream", stream); fieldValuesMap = fieldMapBuilder.build(); } } @@ -165,48 +149,18 @@ public String getFieldValue(String fieldName) { @Override public String 
toString() { - return PROJECT_DATASET_TABLE_STREAM.instantiate( + return PATH_TEMPLATE.instantiate( "project", project, "dataset", dataset, "table", table, "stream", stream); } - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o != null || getClass() == o.getClass()) { - WriteStreamName that = ((WriteStreamName) o); - return Objects.equals(this.project, that.project) - && Objects.equals(this.dataset, that.dataset) - && Objects.equals(this.table, that.table) - && Objects.equals(this.stream, that.stream); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= Objects.hashCode(project); - h *= 1000003; - h ^= Objects.hashCode(dataset); - h *= 1000003; - h ^= Objects.hashCode(table); - h *= 1000003; - h ^= Objects.hashCode(stream); - return h; - } - - /** Builder for projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}. */ + /** Builder for WriteStreamName. */ public static class Builder { + private String project; private String dataset; private String table; private String stream; - protected Builder() {} - public String getProject() { return project; } @@ -243,6 +197,8 @@ public Builder setStream(String stream) { return this; } + private Builder() {} + private Builder(WriteStreamName writeStreamName) { project = writeStreamName.project; dataset = writeStreamName.dataset; @@ -254,4 +210,33 @@ public WriteStreamName build() { return new WriteStreamName(this); } } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o instanceof WriteStreamName) { + WriteStreamName that = (WriteStreamName) o; + return (this.project.equals(that.project)) + && (this.dataset.equals(that.dataset)) + && (this.table.equals(that.table)) + && (this.stream.equals(that.stream)); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= project.hashCode(); + h *= 1000003; + h ^= dataset.hashCode(); + h *= 
1000003; + h ^= table.hashCode(); + h *= 1000003; + h ^= stream.hashCode(); + return h; + } } diff --git a/synth.metadata b/synth.metadata index 090443985d..f233260b2e 100644 --- a/synth.metadata +++ b/synth.metadata @@ -18,32 +18,32 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7be2c821dd88109038c55c89f7dd48f092eeab9d", - "internalRef": "345476969" + "sha": "f8f975c7d43904e90d6c5f1684fdb6804400e641", + "internalRef": "345522380" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7be2c821dd88109038c55c89f7dd48f092eeab9d", - "internalRef": "345476969" + "sha": "f8f975c7d43904e90d6c5f1684fdb6804400e641", + "internalRef": "345522380" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7be2c821dd88109038c55c89f7dd48f092eeab9d", - "internalRef": "345476969" + "sha": "f8f975c7d43904e90d6c5f1684fdb6804400e641", + "internalRef": "345522380" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7be2c821dd88109038c55c89f7dd48f092eeab9d", - "internalRef": "345476969" + "sha": "f8f975c7d43904e90d6c5f1684fdb6804400e641", + "internalRef": "345522380" } }, { @@ -234,6 +234,7 @@ "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadRowsResponse.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadRowsResponseOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSession.java", + "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStream.java", 
"proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java", @@ -246,6 +247,7 @@ "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamProto.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamStats.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamStatsOrBuilder.java", + "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ThrottleState.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ThrottleStateOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/proto/google/cloud/bigquery/storage/v1/arrow.proto", @@ -266,7 +268,9 @@ "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/AvroProto.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadOptions.java", + "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/Storage.java", + "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/TableReferenceProto.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/arrow.proto", "proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/avro.proto", @@ 
-319,6 +323,7 @@ "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadRowsResponse.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadRowsResponseOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSession.java", + "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStream.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java", From ca8c91104567aa848f3da135ddca8ef711e9471d Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 11 Dec 2020 16:42:29 -0800 Subject: [PATCH 5/6] chore: migrate java-bigquerystorage to the Java microgenerator Committer: @miraleung PiperOrigin-RevId: 346405446 Source-Author: Google APIs Source-Date: Tue Dec 8 14:03:11 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: abc43060f136ce77124754a48f367102e646844a Source-Link: https://github.com/googleapis/googleapis/commit/abc43060f136ce77124754a48f367102e646844a --- .../storage/v1/BaseBigQueryReadClient.java | 140 +----- .../storage/v1/BaseBigQueryReadSettings.java | 33 +- .../bigquery/storage/v1/package-info.java | 23 +- .../storage/v1/stub/BigQueryReadStub.java | 9 +- .../v1/stub/BigQueryReadStubSettings.java | 99 ++-- .../stub/GrpcBigQueryReadCallableFactory.java | 40 +- .../storage/v1/stub/GrpcBigQueryReadStub.java | 27 +- .../storage/v1alpha2/BigQueryWriteClient.java | 427 ++++-------------- .../v1alpha2/BigQueryWriteSettings.java | 79 ++-- .../storage/v1alpha2/package-info.java | 20 +- .../v1alpha2/stub/BigQueryWriteStub.java | 37 
+- .../stub/BigQueryWriteStubSettings.java | 189 ++++---- .../GrpcBigQueryWriteCallableFactory.java | 40 +- .../v1alpha2/stub/GrpcBigQueryWriteStub.java | 254 ++++++----- .../v1beta1/BaseBigQueryStorageClient.java | 304 +++---------- .../v1beta1/BaseBigQueryStorageSettings.java | 71 +-- .../storage/v1beta1/package-info.java | 23 +- .../v1beta1/stub/BigQueryStorageStub.java | 34 +- .../stub/BigQueryStorageStubSettings.java | 184 ++++---- .../GrpcBigQueryStorageCallableFactory.java | 40 +- .../v1beta1/stub/GrpcBigQueryStorageStub.java | 187 ++++---- .../v1beta2/BaseBigQueryReadClient.java | 140 +----- .../v1beta2/BaseBigQueryReadSettings.java | 33 +- .../storage/v1beta2/BigQueryWriteClient.java | 310 ++----------- .../v1beta2/BigQueryWriteSettings.java | 36 +- .../storage/v1beta2/package-info.java | 35 +- .../v1beta2/stub/BigQueryReadStub.java | 9 +- .../stub/BigQueryReadStubSettings.java | 148 ++---- .../v1beta2/stub/BigQueryWriteStub.java | 9 +- .../stub/BigQueryWriteStubSettings.java | 159 +++---- .../stub/GrpcBigQueryReadCallableFactory.java | 40 +- .../v1beta2/stub/GrpcBigQueryReadStub.java | 27 +- .../GrpcBigQueryWriteCallableFactory.java | 40 +- .../v1beta2/stub/GrpcBigQueryWriteStub.java | 39 +- .../v1/BaseBigQueryReadClientTest.java | 152 +++++-- .../bigquery/storage/v1/MockBigQueryRead.java | 6 +- .../storage/v1/MockBigQueryReadImpl.java | 18 +- .../v1alpha2/BigQueryWriteClientTest.java | 360 ++++++++++----- .../storage/v1alpha2/MockBigQueryWrite.java | 6 +- .../v1alpha2/MockBigQueryWriteImpl.java | 79 ++-- .../BaseBigQueryStorageClientTest.java | 189 ++++---- .../storage/v1beta1/MockBigQueryStorage.java | 6 +- .../v1beta1/MockBigQueryStorageImpl.java | 58 ++- .../v1beta2/BaseBigQueryReadClientTest.java | 157 +++++-- .../v1beta2/BigQueryWriteClientTest.java | 298 +++++++++--- .../storage/v1beta2/MockBigQueryRead.java | 6 +- .../storage/v1beta2/MockBigQueryReadImpl.java | 18 +- .../storage/v1beta2/MockBigQueryWrite.java | 6 +- 
.../v1beta2/MockBigQueryWriteImpl.java | 30 +- .../bigquery/storage/v1/ProjectName.java | 86 ++-- .../bigquery/storage/v1/ReadSessionName.java | 210 --------- .../bigquery/storage/v1/ReadStreamName.java | 125 ++--- .../cloud/bigquery/storage/v1/TableName.java | 204 --------- .../bigquery/storage/v1alpha2/TableName.java | 113 +++-- .../storage/v1alpha2/WriteStreamName.java | 125 ++--- .../bigquery/storage/v1beta1/ProjectName.java | 86 ++-- .../storage/v1beta1/ReadSessionName.java | 210 --------- .../bigquery/storage/v1beta1/StreamName.java | 210 --------- .../bigquery/storage/v1beta2/ProjectName.java | 86 ++-- .../storage/v1beta2/ReadSessionName.java | 210 --------- .../storage/v1beta2/ReadStreamName.java | 125 ++--- .../bigquery/storage/v1beta2/TableName.java | 113 +++-- .../storage/v1beta2/WriteStreamName.java | 125 ++--- synth.metadata | 21 +- 64 files changed, 2641 insertions(+), 4082 deletions(-) delete mode 100644 proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java delete mode 100644 proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java delete mode 100644 proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java delete mode 100644 proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java delete mode 100644 proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java index 3c36401335..37aca0169b 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java +++ 
b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -25,7 +26,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery Read API. * @@ -34,18 +35,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   ReadSession readSession = ReadSession.newBuilder().build();
- *   int maxStreamCount = 0;
- *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the baseBigQueryReadClient object to clean up resources + *

Note: close() needs to be called on the BaseBigQueryReadClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -74,30 +64,28 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * 
- * 
+ * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * 
- * 
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BaseBigQueryReadClient implements BackgroundResource { private final BaseBigQueryReadSettings settings; private final BigQueryReadStub stub; @@ -118,7 +106,7 @@ public static final BaseBigQueryReadClient create(BaseBigQueryReadSettings setti /** * Constructs an instance of BaseBigQueryReadClient, using the given stub for making calls. This - * is for advanced usage - prefer to use BaseBigQueryReadSettings}. + * is for advanced usage - prefer using create(BaseBigQueryReadSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryReadClient create(BigQueryReadStub stub) { @@ -150,7 +138,7 @@ public BigQueryReadStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -169,17 +157,6 @@ public BigQueryReadStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   int maxStreamCount = 0;
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
-   * }
-   * 
- * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. * @param readSession Required. Session to be created. @@ -202,7 +179,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -221,17 +198,6 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   int maxStreamCount = 0;
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent.toString(), readSession, maxStreamCount);
-   * }
-   * 
- * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. * @param readSession Required. Session to be created. @@ -254,7 +220,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -273,20 +239,6 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setReadSession(readSession)
-   *     .build();
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -294,7 +246,7 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -314,26 +266,12 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setReadSession(readSession)
-   *     .build();
-   *   ApiFuture<ReadSession> future = baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
-   *   // Do something
-   *   ReadSession response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Reads rows from the stream in the format prescribed by the ReadSession. Each response contains * one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to @@ -343,26 +281,12 @@ public final UnaryCallable createReadSess * stream. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName readStream = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
-   *     .setReadStream(readStream.toString())
-   *     .build();
-   *
-   *   ServerStream<ReadRowsResponse> stream = baseBigQueryReadClient.readRowsCallable().call(request);
-   *   for (ReadRowsResponse response : stream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -375,18 +299,6 @@ public final ServerStreamingCallable readRows * original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read * to completion. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -394,7 +306,7 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -408,18 +320,6 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ * to completion. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
-   *   // Do something
-   *   SplitReadStreamResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable splitReadStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java index 7dfff71a93..d9669ef04a 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1; import com.google.api.core.ApiFunction; @@ -31,7 +32,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BaseBigQueryReadClient}. * @@ -49,23 +50,24 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BaseBigQueryReadSettings.newBuilder();
  * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         baseBigQueryReadSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BaseBigQueryReadSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BaseBigQueryReadSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createReadSession. */ public UnaryCallSettings createReadSessionSettings() { return ((BigQueryReadStubSettings) getStubSettings()).createReadSessionSettings(); @@ -142,18 +144,15 @@ protected BaseBigQueryReadSettings(Builder settingsBuilder) throws IOException { /** Builder for BaseBigQueryReadSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryReadStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryReadStubSettings.newBuilder()); - } - protected Builder(BaseBigQueryReadSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -162,11 +161,15 @@ protected Builder(BigQueryReadStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryReadStubSettings.newBuilder()); + } + public BigQueryReadStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryReadStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java index a29e6a13d4..b6a07a3c4a 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,17 @@ */ /** - * A client to BigQuery Storage API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

====================== BaseBigQueryReadClient ====================== + *

======================= BigQueryReadClient ======================= * *

Service Description: BigQuery Read API. * *

The Read API can be used to read data from BigQuery. * - *

Sample for BaseBigQueryReadClient: - * - *

- * 
- * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   ReadSession readSession = ReadSession.newBuilder().build();
- *   int maxStreamCount = 0;
- *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
- * }
- * 
- * 
+ *

Sample for BigQueryReadClient: */ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.bigquery.storage.v1; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java index 01bff92268..85cb247aaf 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -27,14 +27,13 @@ import com.google.cloud.bigquery.storage.v1.SplitReadStreamResponse; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. + * Base stub class for the BigQueryRead service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryReadStub implements BackgroundResource { public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java index 1b657327c5..643f8c3d21 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1.stub; import com.google.api.core.ApiFunction; @@ -46,7 +47,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryReadStub}. * @@ -64,22 +65,23 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
- * BigQueryReadStubSettings.Builder baseBigQueryReadSettingsBuilder =
+ * 
{@code
+ * BigQueryReadStubSettings.Builder bigQueryReadSettingsBuilder =
  *     BigQueryReadStubSettings.newBuilder();
- * baseBigQueryReadSettingsBuilder
+ * bigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         bigQueryReadSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryReadStubSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * 
- * 
+ * BigQueryReadStubSettings bigQueryReadSettings = bigQueryReadSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryReadStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -116,10 +118,10 @@ public BigQueryReadStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryReadStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -187,14 +189,12 @@ protected BigQueryReadStubSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryReadStubSettings. */ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createReadSessionSettings; private final ServerStreamingCallSettings.Builder readRowsSettings; private final UnaryCallSettings.Builder splitReadStreamSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -202,19 +202,18 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", + "retry_policy_0_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_3_codes", + "retry_policy_1_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, 
StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -233,7 +232,7 @@ public static class Builder extends StubSettings.Builder>of( createReadSessionSettings, splitReadStreamSettings); - initDefaults(this); } + protected Builder(BigQueryReadStubSettings settings) { + super(settings); + + createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); + readRowsSettings = settings.readRowsSettings.toBuilder(); + splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createReadSessionSettings, splitReadStreamSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .readRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .splitReadStreamSettings() - 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); return builder; } - protected Builder(BigQueryReadStubSettings settings) { - super(settings); - - createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); - readRowsSettings = settings.readRowsSettings.toBuilder(); - splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createReadSessionSettings, splitReadStreamSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java index 886b58e704..3da5e2a734 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryRead service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryReadCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java index edb90c4e5a..2b2b70d216 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/GrpcBigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -31,6 +31,7 @@ import com.google.cloud.bigquery.storage.v1.SplitReadStreamRequest; import com.google.cloud.bigquery.storage.v1.SplitReadStreamResponse; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -38,16 +39,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryRead service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryReadStub extends BigQueryReadStub { - private static final MethodDescriptor createReadSessionMethodDescriptor = MethodDescriptor.newBuilder() @@ -57,6 +56,7 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) .build(); + private static final MethodDescriptor readRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -65,6 +65,7 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor splitReadStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -76,13 +77,13 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final UnaryCallable createReadSessionCallable; private final ServerStreamingCallable readRowsCallable; private final UnaryCallable splitReadStreamCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryReadStub create(BigQueryReadStubSettings settings) @@ -121,6 +122,7 @@ protected GrpcBigQueryReadStub( GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings 
createReadSessionTransportSettings = GrpcCallSettings.newBuilder() @@ -176,7 +178,12 @@ public Map extract(SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java index edeb6e2800..c1259084b5 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,30 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStub; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStubSettings; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery Write API. * @@ -45,17 +35,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- *   WriteStream writeStream = WriteStream.newBuilder().build();
- *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the bigQueryWriteClient object to clean up resources such + *

Note: close() needs to be called on the BigQueryWriteClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * *

The surface of this class includes several types of Java methods for each of the API's @@ -83,30 +63,26 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
- * BigQueryWriteClient bigQueryWriteClient =
- *     BigQueryWriteClient.create(bigQueryWriteSettings);
- * 
- * 
+ * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings); + * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder().setEndpoint(myEndpoint).build();
- * BigQueryWriteClient bigQueryWriteClient =
- *     BigQueryWriteClient.create(bigQueryWriteSettings);
- * 
- * 
+ * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings); + * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BigQueryWriteClient implements BackgroundResource { private final BigQueryWriteSettings settings; private final BigQueryWriteStub stub; @@ -127,7 +103,7 @@ public static final BigQueryWriteClient create(BigQueryWriteSettings settings) /** * Constructs an instance of BigQueryWriteClient, using the given stub for making calls. This is - * for advanced usage - prefer to use BigQueryWriteSettings}. + * for advanced usage - prefer using create(BigQueryWriteSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BigQueryWriteClient create(BigQueryWriteStub stub) { @@ -159,109 +135,66 @@ public BigQueryWriteStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
-   * }
-   * 
- * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @param writeStream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream createWriteStream(TableName parent, WriteStream writeStream) { - CreateWriteStreamRequest request = - CreateWriteStreamRequest.newBuilder() + public final Stream.WriteStream createWriteStream( + TableName parent, Stream.WriteStream writeStream) { + Storage.CreateWriteStreamRequest request = + Storage.CreateWriteStreamRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) .setWriteStream(writeStream) .build(); return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent.toString(), writeStream);
-   * }
-   * 
- * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @param writeStream Required. Stream to be created. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream createWriteStream(String parent, WriteStream writeStream) { - CreateWriteStreamRequest request = - CreateWriteStreamRequest.newBuilder().setParent(parent).setWriteStream(writeStream).build(); + public final Stream.WriteStream createWriteStream(String parent, Stream.WriteStream writeStream) { + Storage.CreateWriteStreamRequest request = + Storage.CreateWriteStreamRequest.newBuilder() + .setParent(parent) + .setWriteStream(writeStream) + .build(); return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setWriteStream(writeStream)
-   *     .build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream createWriteStream(CreateWriteStreamRequest request) { + public final Stream.WriteStream createWriteStream(Storage.CreateWriteStreamRequest request) { return createWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setWriteStream(writeStream)
-   *     .build();
-   *   ApiFuture<WriteStream> future = bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   WriteStream response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable createWriteStreamCallable() { + public final UnaryCallable + createWriteStreamCallable() { return stub.createWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Appends data to the given stream. * @@ -281,396 +214,229 @@ public final UnaryCallable createWriteStr * the stream is committed. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
-   *       bigQueryWriteClient.appendRowsCallable().call();
-   *
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   AppendRowsRequest request = AppendRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   bidiStream.send(request);
-   *   for (AppendRowsResponse response : bidiStream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ - public final BidiStreamingCallable appendRowsCallable() { + public final BidiStreamingCallable + appendRowsCallable() { return stub.appendRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(name);
-   * }
-   * 
- * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream getWriteStream(WriteStreamName name) { - GetWriteStreamRequest request = - GetWriteStreamRequest.newBuilder().setName(name == null ? null : name.toString()).build(); + public final Stream.WriteStream getWriteStream(WriteStreamName name) { + Storage.GetWriteStreamRequest request = + Storage.GetWriteStreamRequest.newBuilder() + .setName(name == null ? null : name.toString()) + .build(); return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(name.toString());
-   * }
-   * 
- * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream getWriteStream(String name) { - GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder().setName(name).build(); + public final Stream.WriteStream getWriteStream(String name) { + Storage.GetWriteStreamRequest request = + Storage.GetWriteStreamRequest.newBuilder().setName(name).build(); return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final WriteStream getWriteStream(GetWriteStreamRequest request) { + public final Stream.WriteStream getWriteStream(Storage.GetWriteStreamRequest request) { return getWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<WriteStream> future = bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   WriteStream response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable getWriteStreamCallable() { + public final UnaryCallable + getWriteStreamCallable() { return stub.getWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
-   * }
-   * 
- * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { - FinalizeWriteStreamRequest request = - FinalizeWriteStreamRequest.newBuilder() + public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) { + Storage.FinalizeWriteStreamRequest request = + Storage.FinalizeWriteStreamRequest.newBuilder() .setName(name == null ? null : name.toString()) .build(); return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name.toString());
-   * }
-   * 
- * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FinalizeWriteStreamResponse finalizeWriteStream(String name) { - FinalizeWriteStreamRequest request = - FinalizeWriteStreamRequest.newBuilder().setName(name).build(); + public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(String name) { + Storage.FinalizeWriteStreamRequest request = + Storage.FinalizeWriteStreamRequest.newBuilder().setName(name).build(); return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStreamRequest request) { + public final Storage.FinalizeWriteStreamResponse finalizeWriteStream( + Storage.FinalizeWriteStreamRequest request) { return finalizeWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<FinalizeWriteStreamResponse> future = bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   FinalizeWriteStreamResponse response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable + public final UnaryCallable< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamCallable() { return stub.finalizeWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent);
-   * }
-   * 
- * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(TableName parent) { - BatchCommitWriteStreamsRequest request = - BatchCommitWriteStreamsRequest.newBuilder() + public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(TableName parent) { + Storage.BatchCommitWriteStreamsRequest request = + Storage.BatchCommitWriteStreamsRequest.newBuilder() .setParent(parent == null ? null : parent.toString()) .build(); return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent.toString());
-   * }
-   * 
- * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(String parent) { - BatchCommitWriteStreamsRequest request = - BatchCommitWriteStreamsRequest.newBuilder().setParent(parent).build(); + public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(String parent) { + Storage.BatchCommitWriteStreamsRequest request = + Storage.BatchCommitWriteStreamsRequest.newBuilder().setParent(parent).build(); return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   List<String> writeStreams = new ArrayList<>();
-   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .addAllWriteStreams(writeStreams)
-   *     .build();
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCommitWriteStreamsResponse batchCommitWriteStreams( - BatchCommitWriteStreamsRequest request) { + public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams( + Storage.BatchCommitWriteStreamsRequest request) { return batchCommitWriteStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   List<String> writeStreams = new ArrayList<>();
-   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .addAllWriteStreams(writeStreams)
-   *     .build();
-   *   ApiFuture<BatchCommitWriteStreamsResponse> future = bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
-   *   // Do something
-   *   BatchCommitWriteStreamsResponse response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable + public final UnaryCallable< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsCallable() { return stub.batchCommitWriteStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
-   * }
-   * 
- * * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FlushRowsResponse flushRows(WriteStreamName writeStream) { - FlushRowsRequest request = - FlushRowsRequest.newBuilder() + public final Storage.FlushRowsResponse flushRows(WriteStreamName writeStream) { + Storage.FlushRowsRequest request = + Storage.FlushRowsRequest.newBuilder() .setWriteStream(writeStream == null ? null : writeStream.toString()) .build(); return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream.toString());
-   * }
-   * 
- * * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FlushRowsResponse flushRows(String writeStream) { - FlushRowsRequest request = FlushRowsRequest.newBuilder().setWriteStream(writeStream).build(); + public final Storage.FlushRowsResponse flushRows(String writeStream) { + Storage.FlushRowsRequest request = + Storage.FlushRowsRequest.newBuilder().setWriteStream(writeStream).build(); return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final FlushRowsResponse flushRows(FlushRowsRequest request) { + public final Storage.FlushRowsResponse flushRows(Storage.FlushRowsRequest request) { return flushRowsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation @@ -678,20 +444,9 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) { * the request. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   ApiFuture<FlushRowsResponse> future = bigQueryWriteClient.flushRowsCallable().futureCall(request);
-   *   // Do something
-   *   FlushRowsResponse response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable flushRowsCallable() { + public final UnaryCallable + flushRowsCallable() { return stub.flushRowsCallable(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java index a029c17d0e..2bfe1ee7e0 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.ApiFunction; @@ -26,23 +27,12 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.cloud.bigquery.storage.v1alpha2.stub.BigQueryWriteStubSettings; import java.io.IOException; import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryWriteClient}. * @@ -60,52 +50,57 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

- * 
- * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder =
- *     BigQueryWriteSettings.newBuilder();
+ * 
{@code
+ * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder = BigQueryWriteSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
+ *         bigQueryWriteSettingsBuilder
+ *             .createWriteStreamSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BigQueryWriteSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createWriteStream. */ - public UnaryCallSettings createWriteStreamSettings() { + public UnaryCallSettings + createWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).createWriteStreamSettings(); } /** Returns the object with the settings used for calls to appendRows. */ - public StreamingCallSettings appendRowsSettings() { + public StreamingCallSettings + appendRowsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).appendRowsSettings(); } /** Returns the object with the settings used for calls to getWriteStream. */ - public UnaryCallSettings getWriteStreamSettings() { + public UnaryCallSettings + getWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).getWriteStreamSettings(); } /** Returns the object with the settings used for calls to finalizeWriteStream. */ - public UnaryCallSettings + public UnaryCallSettings finalizeWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).finalizeWriteStreamSettings(); } /** Returns the object with the settings used for calls to batchCommitWriteStreams. */ - public UnaryCallSettings + public UnaryCallSettings< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).batchCommitWriteStreamsSettings(); } /** Returns the object with the settings used for calls to flushRows. */ - public UnaryCallSettings flushRowsSettings() { + public UnaryCallSettings + flushRowsSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).flushRowsSettings(); } @@ -169,18 +164,15 @@ protected BigQueryWriteSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryWriteSettings. 
*/ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryWriteStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryWriteStubSettings.newBuilder()); - } - protected Builder(BigQueryWriteSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -189,11 +181,15 @@ protected Builder(BigQueryWriteStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryWriteStubSettings.newBuilder()); + } + public BigQueryWriteStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryWriteStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -207,37 +203,40 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createWriteStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createWriteStreamSettings() { return getStubSettingsBuilder().createWriteStreamSettings(); } /** Returns the builder for the settings used for calls to appendRows. */ - public StreamingCallSettings.Builder + public StreamingCallSettings.Builder appendRowsSettings() { return getStubSettingsBuilder().appendRowsSettings(); } /** Returns the builder for the settings used for calls to getWriteStream. */ - public UnaryCallSettings.Builder getWriteStreamSettings() { + public UnaryCallSettings.Builder + getWriteStreamSettings() { return getStubSettingsBuilder().getWriteStreamSettings(); } /** Returns the builder for the settings used for calls to finalizeWriteStream. 
*/ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamSettings() { return getStubSettingsBuilder().finalizeWriteStreamSettings(); } /** Returns the builder for the settings used for calls to batchCommitWriteStreams. */ public UnaryCallSettings.Builder< - BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return getStubSettingsBuilder().batchCommitWriteStreamsSettings(); } /** Returns the builder for the settings used for calls to flushRows. */ - public UnaryCallSettings.Builder flushRowsSettings() { + public UnaryCallSettings.Builder + flushRowsSettings() { return getStubSettingsBuilder().flushRowsSettings(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java index d5a0a66695..561987d3b3 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,17 @@ */ /** - * A client to BigQuery Storage API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

=================== BigQueryWriteClient =================== + *

======================= BigQueryWriteClient ======================= * *

Service Description: BigQuery Write API. * *

The Write API can be used to write data to BigQuery. * *

Sample for BigQueryWriteClient: - * - *

- * 
- * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- *   WriteStream writeStream = WriteStream.newBuilder().build();
- *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
- * }
- * 
- * 
*/ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.bigquery.storage.v1alpha2; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java index c86dcd8a28..27ef0b03d7 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,58 +13,51 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage; +import com.google.cloud.bigquery.storage.v1alpha2.Stream; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. + * Base stub class for the BigQueryWrite service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryWriteStub implements BackgroundResource { - public UnaryCallable createWriteStreamCallable() { + public UnaryCallable + createWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: createWriteStreamCallable()"); } - public BidiStreamingCallable appendRowsCallable() { + public BidiStreamingCallable + appendRowsCallable() { throw new UnsupportedOperationException("Not implemented: appendRowsCallable()"); } - public UnaryCallable getWriteStreamCallable() { + public UnaryCallable getWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: getWriteStreamCallable()"); } - public UnaryCallable + public UnaryCallable finalizeWriteStreamCallable() { throw new UnsupportedOperationException("Not implemented: finalizeWriteStreamCallable()"); } - public UnaryCallable + public UnaryCallable< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsCallable() { throw new UnsupportedOperationException("Not implemented: batchCommitWriteStreamsCallable()"); } - public UnaryCallable flushRowsCallable() { + public UnaryCallable flushRowsCallable() { throw new UnsupportedOperationException("Not implemented: flushRowsCallable()"); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java index 09fd472e31..f1d4fc571d 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java +++ 
b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/BigQueryWriteStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1alpha2.stub; import com.google.api.core.ApiFunction; @@ -31,17 +32,8 @@ import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage; +import com.google.cloud.bigquery.storage.v1alpha2.Stream; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; 
import com.google.common.collect.ImmutableSet; @@ -51,7 +43,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryWriteStub}. * @@ -69,22 +61,23 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

- * 
+ * 
{@code
  * BigQueryWriteStubSettings.Builder bigQueryWriteSettingsBuilder =
  *     BigQueryWriteStubSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
+ *         bigQueryWriteSettingsBuilder
+ *             .createWriteStreamSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteStubSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryWriteStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -94,44 +87,55 @@ public class BigQueryWriteStubSettings extends StubSettings createWriteStreamSettings; - private final StreamingCallSettings appendRowsSettings; - private final UnaryCallSettings getWriteStreamSettings; - private final UnaryCallSettings + private final UnaryCallSettings + createWriteStreamSettings; + private final StreamingCallSettings + appendRowsSettings; + private final UnaryCallSettings + getWriteStreamSettings; + private final UnaryCallSettings< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamSettings; - private final UnaryCallSettings + private final UnaryCallSettings< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings; - private final UnaryCallSettings flushRowsSettings; + private final UnaryCallSettings + flushRowsSettings; /** Returns the object with the settings used for calls to createWriteStream. */ - public UnaryCallSettings createWriteStreamSettings() { + public UnaryCallSettings + createWriteStreamSettings() { return createWriteStreamSettings; } /** Returns the object with the settings used for calls to appendRows. */ - public StreamingCallSettings appendRowsSettings() { + public StreamingCallSettings + appendRowsSettings() { return appendRowsSettings; } /** Returns the object with the settings used for calls to getWriteStream. */ - public UnaryCallSettings getWriteStreamSettings() { + public UnaryCallSettings + getWriteStreamSettings() { return getWriteStreamSettings; } /** Returns the object with the settings used for calls to finalizeWriteStream. 
*/ - public UnaryCallSettings + public UnaryCallSettings finalizeWriteStreamSettings() { return finalizeWriteStreamSettings; } /** Returns the object with the settings used for calls to batchCommitWriteStreams. */ - public UnaryCallSettings + public UnaryCallSettings< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return batchCommitWriteStreamsSettings; } /** Returns the object with the settings used for calls to flushRows. */ - public UnaryCallSettings flushRowsSettings() { + public UnaryCallSettings + flushRowsSettings() { return flushRowsSettings; } @@ -141,10 +145,10 @@ public BigQueryWriteStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryWriteStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -215,20 +219,21 @@ protected BigQueryWriteStubSettings(Builder settingsBuilder) throws IOException /** Builder for BigQueryWriteStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder createWriteStreamSettings; - private final StreamingCallSettings.Builder + private final StreamingCallSettings.Builder< + Storage.AppendRowsRequest, Storage.AppendRowsResponse> appendRowsSettings; - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder getWriteStreamSettings; - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamSettings; private final UnaryCallSettings.Builder< - BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings; - private final UnaryCallSettings.Builder flushRowsSettings; - + private final UnaryCallSettings.Builder + flushRowsSettings; private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -236,23 +241,23 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", + "retry_policy_0_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_3_codes", + "retry_policy_1_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); definitions.put( "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); + 
definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -271,7 +276,7 @@ public static class Builder extends StubSettings.Builder>of( + createWriteStreamSettings, + getWriteStreamSettings, + finalizeWriteStreamSettings, + batchCommitWriteStreamsSettings, + flushRowsSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .getWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder .finalizeWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder .batchCommitWriteStreamsSettings() - 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder .flushRowsSettings() @@ -368,26 +387,7 @@ private static Builder initDefaults(Builder builder) { return builder; } - protected Builder(BigQueryWriteStubSettings settings) { - super(settings); - - createWriteStreamSettings = settings.createWriteStreamSettings.toBuilder(); - appendRowsSettings = settings.appendRowsSettings.toBuilder(); - getWriteStreamSettings = settings.getWriteStreamSettings.toBuilder(); - finalizeWriteStreamSettings = settings.finalizeWriteStreamSettings.toBuilder(); - batchCommitWriteStreamsSettings = settings.batchCommitWriteStreamsSettings.toBuilder(); - flushRowsSettings = settings.flushRowsSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createWriteStreamSettings, - getWriteStreamSettings, - finalizeWriteStreamSettings, - batchCommitWriteStreamsSettings, - flushRowsSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -404,37 +404,40 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createWriteStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createWriteStreamSettings() { return createWriteStreamSettings; } /** Returns the builder for the settings used for calls to appendRows. */ - public StreamingCallSettings.Builder + public StreamingCallSettings.Builder appendRowsSettings() { return appendRowsSettings; } /** Returns the builder for the settings used for calls to getWriteStream. 
*/ - public UnaryCallSettings.Builder getWriteStreamSettings() { + public UnaryCallSettings.Builder + getWriteStreamSettings() { return getWriteStreamSettings; } /** Returns the builder for the settings used for calls to finalizeWriteStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamSettings() { return finalizeWriteStreamSettings; } /** Returns the builder for the settings used for calls to batchCommitWriteStreams. */ public UnaryCallSettings.Builder< - BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsSettings() { return batchCommitWriteStreamsSettings; } /** Returns the builder for the settings used for calls to flushRows. */ - public UnaryCallSettings.Builder flushRowsSettings() { + public UnaryCallSettings.Builder + flushRowsSettings() { return flushRowsSettings; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java index e1e5621cdf..f80038c9ae 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1alpha2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryWrite service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryWriteCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java index 7729ba6f9b..ec5d1ef438 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/stub/GrpcBigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -24,18 +24,10 @@ import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; +import com.google.cloud.bigquery.storage.v1alpha2.Storage; +import com.google.cloud.bigquery.storage.v1alpha2.Stream; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -43,88 +35,106 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryWrite service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryWriteStub extends BigQueryWriteStub { - - private static final MethodDescriptor + private static final MethodDescriptor createWriteStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/CreateWriteStream") .setRequestMarshaller( - ProtoUtils.marshaller(CreateWriteStreamRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) + ProtoUtils.marshaller(Storage.CreateWriteStreamRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Stream.WriteStream.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor appendRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.BIDI_STREAMING) .setFullMethodName("google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/AppendRows") - .setRequestMarshaller(ProtoUtils.marshaller(AppendRowsRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(AppendRowsResponse.getDefaultInstance())) + .setRequestMarshaller( + ProtoUtils.marshaller(Storage.AppendRowsRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(Storage.AppendRowsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor getWriteStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/GetWriteStream") 
.setRequestMarshaller( - ProtoUtils.marshaller(GetWriteStreamRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) + ProtoUtils.marshaller(Storage.GetWriteStreamRequest.getDefaultInstance())) + .setResponseMarshaller(ProtoUtils.marshaller(Stream.WriteStream.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor + .newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/FinalizeWriteStream") .setRequestMarshaller( - ProtoUtils.marshaller(FinalizeWriteStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(Storage.FinalizeWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(FinalizeWriteStreamResponse.getDefaultInstance())) + ProtoUtils.marshaller(Storage.FinalizeWriteStreamResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor< - BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsMethodDescriptor = MethodDescriptor - .newBuilder() + . 
+ newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/BatchCommitWriteStreams") .setRequestMarshaller( - ProtoUtils.marshaller(BatchCommitWriteStreamsRequest.getDefaultInstance())) + ProtoUtils.marshaller( + Storage.BatchCommitWriteStreamsRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(BatchCommitWriteStreamsResponse.getDefaultInstance())) + ProtoUtils.marshaller( + Storage.BatchCommitWriteStreamsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor flushRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/FlushRows") - .setRequestMarshaller(ProtoUtils.marshaller(FlushRowsRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(FlushRowsResponse.getDefaultInstance())) + .setRequestMarshaller( + ProtoUtils.marshaller(Storage.FlushRowsRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(Storage.FlushRowsResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - - private final UnaryCallable createWriteStreamCallable; - private final BidiStreamingCallable appendRowsCallable; - private final UnaryCallable getWriteStreamCallable; - private final UnaryCallable + private final UnaryCallable + createWriteStreamCallable; + private final BidiStreamingCallable + appendRowsCallable; + private final UnaryCallable + getWriteStreamCallable; + private final UnaryCallable< + Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> finalizeWriteStreamCallable; - private final UnaryCallable + private final UnaryCallable< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsCallable; - private final 
UnaryCallable flushRowsCallable; + private final UnaryCallable + flushRowsCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryWriteStub create(BigQueryWriteStubSettings settings) @@ -163,79 +173,99 @@ protected GrpcBigQueryWriteStub( GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); - GrpcCallSettings createWriteStreamTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(createWriteStreamMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(CreateWriteStreamRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("parent", String.valueOf(request.getParent())); - return params.build(); - } - }) - .build(); - GrpcCallSettings appendRowsTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(appendRowsMethodDescriptor) - .build(); - GrpcCallSettings getWriteStreamTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(getWriteStreamMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(GetWriteStreamRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("name", String.valueOf(request.getName())); - return params.build(); - } - }) - .build(); - GrpcCallSettings + GrpcCallSettings + createWriteStreamTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createWriteStreamMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.CreateWriteStreamRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("parent", String.valueOf(request.getParent())); + return 
params.build(); + } + }) + .build(); + GrpcCallSettings + appendRowsTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(appendRowsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.AppendRowsRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("write_stream", String.valueOf(request.getWriteStream())); + return params.build(); + } + }) + .build(); + GrpcCallSettings + getWriteStreamTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(getWriteStreamMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.GetWriteStreamRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("name", String.valueOf(request.getName())); + return params.build(); + } + }) + .build(); + GrpcCallSettings finalizeWriteStreamTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings + . + newBuilder() .setMethodDescriptor(finalizeWriteStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(FinalizeWriteStreamRequest request) { + public Map extract( + Storage.FinalizeWriteStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("name", String.valueOf(request.getName())); return params.build(); } }) .build(); - GrpcCallSettings + GrpcCallSettings< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsTransportSettings = GrpcCallSettings - .newBuilder() + . 
+ newBuilder() .setMethodDescriptor(batchCommitWriteStreamsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(BatchCommitWriteStreamsRequest request) { + public Map extract( + Storage.BatchCommitWriteStreamsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("parent", String.valueOf(request.getParent())); return params.build(); } }) .build(); - GrpcCallSettings flushRowsTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(flushRowsMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(FlushRowsRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put("write_stream", String.valueOf(request.getWriteStream())); - return params.build(); - } - }) - .build(); + GrpcCallSettings + flushRowsTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(flushRowsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(Storage.FlushRowsRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("write_stream", String.valueOf(request.getWriteStream())); + return params.build(); + } + }) + .build(); this.createWriteStreamCallable = callableFactory.createUnaryCallable( @@ -262,32 +292,40 @@ public Map extract(FlushRowsRequest request) { callableFactory.createUnaryCallable( flushRowsTransportSettings, settings.flushRowsSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } - public UnaryCallable createWriteStreamCallable() { + public UnaryCallable + createWriteStreamCallable() { return createWriteStreamCallable; } - 
public BidiStreamingCallable appendRowsCallable() { + public BidiStreamingCallable + appendRowsCallable() { return appendRowsCallable; } - public UnaryCallable getWriteStreamCallable() { + public UnaryCallable getWriteStreamCallable() { return getWriteStreamCallable; } - public UnaryCallable + public UnaryCallable finalizeWriteStreamCallable() { return finalizeWriteStreamCallable; } - public UnaryCallable + public UnaryCallable< + Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> batchCommitWriteStreamsCallable() { return batchCommitWriteStreamsCallable; } - public UnaryCallable flushRowsCallable() { + public UnaryCallable flushRowsCallable() { return flushRowsCallable; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java index 8f337335b5..0eb7ed9ea1 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,23 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.Stream; -import com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStub; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStubSettings; import com.google.protobuf.Empty; @@ -37,7 +27,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery storage API. * @@ -46,18 +36,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
- *   TableReference tableReference = TableReference.newBuilder().build();
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   int requestedStreams = 0;
- *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the baseBigQueryStorageClient object to clean up resources + *

Note: close() needs to be called on the BaseBigQueryStorageClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -86,30 +65,28 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BaseBigQueryStorageSettings baseBigQueryStorageSettings =
  *     BaseBigQueryStorageSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryStorageClient baseBigQueryStorageClient =
  *     BaseBigQueryStorageClient.create(baseBigQueryStorageSettings);
- * 
- * 
+ * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BaseBigQueryStorageSettings baseBigQueryStorageSettings =
  *     BaseBigQueryStorageSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryStorageClient baseBigQueryStorageClient =
  *     BaseBigQueryStorageClient.create(baseBigQueryStorageSettings);
- * 
- * 
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BaseBigQueryStorageClient implements BackgroundResource { private final BaseBigQueryStorageSettings settings; private final BigQueryStorageStub stub; @@ -130,7 +107,7 @@ public static final BaseBigQueryStorageClient create(BaseBigQueryStorageSettings /** * Constructs an instance of BaseBigQueryStorageClient, using the given stub for making calls. - * This is for advanced usage - prefer to use BaseBigQueryStorageSettings}. + * This is for advanced usage - prefer using create(BaseBigQueryStorageSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryStorageClient create(BigQueryStorageStub stub) { @@ -162,7 +139,7 @@ public BigQueryStorageStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -175,17 +152,6 @@ public BigQueryStorageStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   TableReference tableReference = TableReference.newBuilder().build();
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   int requestedStreams = 0;
-   *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
-   * }
-   * 
- * * @param tableReference Required. Reference to the table to read. * @param parent Required. String of the form `projects/{project_id}` indicating the project this * ReadSession is associated with. This is the project that will be billed for usage. @@ -196,10 +162,10 @@ public BigQueryStorageStub getStub() { *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ReadSession createReadSession( - TableReference tableReference, ProjectName parent, int requestedStreams) { - CreateReadSessionRequest request = - CreateReadSessionRequest.newBuilder() + public final Storage.ReadSession createReadSession( + TableReferenceProto.TableReference tableReference, ProjectName parent, int requestedStreams) { + Storage.CreateReadSessionRequest request = + Storage.CreateReadSessionRequest.newBuilder() .setTableReference(tableReference) .setParent(parent == null ? null : parent.toString()) .setRequestedStreams(requestedStreams) @@ -207,7 +173,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -220,17 +186,6 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   TableReference tableReference = TableReference.newBuilder().build();
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   int requestedStreams = 0;
-   *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent.toString(), requestedStreams);
-   * }
-   * 
- * * @param tableReference Required. Reference to the table to read. * @param parent Required. String of the form `projects/{project_id}` indicating the project this * ReadSession is associated with. This is the project that will be billed for usage. @@ -241,10 +196,10 @@ public final ReadSession createReadSession( *

Streams must be read starting from offset 0. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ReadSession createReadSession( - TableReference tableReference, String parent, int requestedStreams) { - CreateReadSessionRequest request = - CreateReadSessionRequest.newBuilder() + public final Storage.ReadSession createReadSession( + TableReferenceProto.TableReference tableReference, String parent, int requestedStreams) { + Storage.CreateReadSessionRequest request = + Storage.CreateReadSessionRequest.newBuilder() .setTableReference(tableReference) .setParent(parent) .setRequestedStreams(requestedStreams) @@ -252,7 +207,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -265,28 +220,14 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   TableReference tableReference = TableReference.newBuilder().build();
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setTableReference(tableReference)
-   *     .setParent(parent.toString())
-   *     .build();
-   *   ReadSession response = baseBigQueryStorageClient.createReadSession(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final ReadSession createReadSession(CreateReadSessionRequest request) { + public final Storage.ReadSession createReadSession(Storage.CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -300,26 +241,13 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   TableReference tableReference = TableReference.newBuilder().build();
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setTableReference(tableReference)
-   *     .setParent(parent.toString())
-   *     .build();
-   *   ApiFuture<ReadSession> future = baseBigQueryStorageClient.createReadSessionCallable().futureCall(request);
-   *   // Do something
-   *   ReadSession response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable createReadSessionCallable() { + public final UnaryCallable + createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Reads rows from the table in the format prescribed by the read session. Each response contains * one or more table rows, up to a maximum of 10 MiB per response; read requests which attempt to @@ -331,111 +259,61 @@ public final UnaryCallable createReadSess * data. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   StreamPosition readPosition = StreamPosition.newBuilder().build();
-   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
-   *     .setReadPosition(readPosition)
-   *     .build();
-   *
-   *   ServerStream<ReadRowsResponse> stream = baseBigQueryStorageClient.readRowsCallable().call(request);
-   *   for (ReadRowsResponse response : stream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ - public final ServerStreamingCallable readRowsCallable() { + public final ServerStreamingCallable + readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   ReadSession session = ReadSession.newBuilder().build();
-   *   int requestedStreams = 0;
-   *   BatchCreateReadSessionStreamsResponse response = baseBigQueryStorageClient.batchCreateReadSessionStreams(session, requestedStreams);
-   * }
-   * 
- * * @param session Required. Must be a non-expired session obtained from a call to * CreateReadSession. Only the name field needs to be set. * @param requestedStreams Required. Number of new streams requested. Must be positive. Number of * added streams may be less than this, see CreateReadSessionRequest for more information. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( - ReadSession session, int requestedStreams) { - BatchCreateReadSessionStreamsRequest request = - BatchCreateReadSessionStreamsRequest.newBuilder() + public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( + Storage.ReadSession session, int requestedStreams) { + Storage.BatchCreateReadSessionStreamsRequest request = + Storage.BatchCreateReadSessionStreamsRequest.newBuilder() .setSession(session) .setRequestedStreams(requestedStreams) .build(); return batchCreateReadSessionStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   ReadSession session = ReadSession.newBuilder().build();
-   *   int requestedStreams = 0;
-   *   BatchCreateReadSessionStreamsRequest request = BatchCreateReadSessionStreamsRequest.newBuilder()
-   *     .setSession(session)
-   *     .setRequestedStreams(requestedStreams)
-   *     .build();
-   *   BatchCreateReadSessionStreamsResponse response = baseBigQueryStorageClient.batchCreateReadSessionStreams(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( - BatchCreateReadSessionStreamsRequest request) { + public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams( + Storage.BatchCreateReadSessionStreamsRequest request) { return batchCreateReadSessionStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   ReadSession session = ReadSession.newBuilder().build();
-   *   int requestedStreams = 0;
-   *   BatchCreateReadSessionStreamsRequest request = BatchCreateReadSessionStreamsRequest.newBuilder()
-   *     .setSession(session)
-   *     .setRequestedStreams(requestedStreams)
-   *     .build();
-   *   ApiFuture<BatchCreateReadSessionStreamsResponse> future = baseBigQueryStorageClient.batchCreateReadSessionStreamsCallable().futureCall(request);
-   *   // Do something
-   *   BatchCreateReadSessionStreamsResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable() { return stub.batchCreateReadSessionStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -449,24 +327,16 @@ public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams *

This method will return an error if there are no other live streams in the Session, or if * SplitReadStream() has been called on the given Stream. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream stream = Stream.newBuilder().build();
-   *   baseBigQueryStorageClient.finalizeStream(stream);
-   * }
-   * 
- * * @param stream Required. Stream to finalize. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final void finalizeStream(Stream stream) { - FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder().setStream(stream).build(); + public final void finalizeStream(Storage.Stream stream) { + Storage.FinalizeStreamRequest request = + Storage.FinalizeStreamRequest.newBuilder().setStream(stream).build(); finalizeStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -480,26 +350,14 @@ public final void finalizeStream(Stream stream) { *

This method will return an error if there are no other live streams in the Session, or if * SplitReadStream() has been called on the given Stream. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream stream = Stream.newBuilder().build();
-   *   FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder()
-   *     .setStream(stream)
-   *     .build();
-   *   baseBigQueryStorageClient.finalizeStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final void finalizeStream(FinalizeStreamRequest request) { + public final void finalizeStream(Storage.FinalizeStreamRequest request) { finalizeStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Triggers the graceful termination of a single stream in a ReadSession. This API can be used to * dynamically adjust the parallelism of a batch processing task downwards without losing data. @@ -514,24 +372,12 @@ public final void finalizeStream(FinalizeStreamRequest request) { * SplitReadStream() has been called on the given Stream. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream stream = Stream.newBuilder().build();
-   *   FinalizeStreamRequest request = FinalizeStreamRequest.newBuilder()
-   *     .setStream(stream)
-   *     .build();
-   *   ApiFuture<Void> future = baseBigQueryStorageClient.finalizeStreamCallable().futureCall(request);
-   *   // Do something
-   *   future.get();
-   * }
-   * 
*/ - public final UnaryCallable finalizeStreamCallable() { + public final UnaryCallable finalizeStreamCallable() { return stub.finalizeStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as @@ -545,25 +391,16 @@ public final UnaryCallable finalizeStreamCallable( * *

This method is guaranteed to be idempotent. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream originalStream = Stream.newBuilder().build();
-   *   SplitReadStreamResponse response = baseBigQueryStorageClient.splitReadStream(originalStream);
-   * }
-   * 
- * * @param originalStream Required. Stream to split. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final SplitReadStreamResponse splitReadStream(Stream originalStream) { - SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setOriginalStream(originalStream).build(); + public final Storage.SplitReadStreamResponse splitReadStream(Storage.Stream originalStream) { + Storage.SplitReadStreamRequest request = + Storage.SplitReadStreamRequest.newBuilder().setOriginalStream(originalStream).build(); return splitReadStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as @@ -577,26 +414,15 @@ public final SplitReadStreamResponse splitReadStream(Stream originalStream) { * *

This method is guaranteed to be idempotent. * - *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream originalStream = Stream.newBuilder().build();
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setOriginalStream(originalStream)
-   *     .build();
-   *   SplitReadStreamResponse response = baseBigQueryStorageClient.splitReadStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ - public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest request) { + public final Storage.SplitReadStreamResponse splitReadStream( + Storage.SplitReadStreamRequest request) { return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given read stream into two Streams. These streams are referred to as the primary and * the residual of the split. The original stream can still be read from in the same manner as @@ -611,20 +437,8 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ *

This method is guaranteed to be idempotent. * *

Sample code: - * - *


-   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
-   *   Stream originalStream = Stream.newBuilder().build();
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setOriginalStream(originalStream)
-   *     .build();
-   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryStorageClient.splitReadStreamCallable().futureCall(request);
-   *   // Do something
-   *   SplitReadStreamResponse response = future.get();
-   * }
-   * 
*/ - public final UnaryCallable + public final UnaryCallable splitReadStreamCallable() { return stub.splitReadStreamCallable(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java index a1bb6b456e..73b6bf5729 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.ApiFunction; @@ -26,22 +27,13 @@ import com.google.api.gax.rpc.ServerStreamingCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.cloud.bigquery.storage.v1beta1.stub.BigQueryStorageStubSettings; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BaseBigQueryStorageClient}. * @@ -59,48 +51,53 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
+ * 
{@code
  * BaseBigQueryStorageSettings.Builder baseBigQueryStorageSettingsBuilder =
  *     BaseBigQueryStorageSettings.newBuilder();
  * baseBigQueryStorageSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryStorageSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         baseBigQueryStorageSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BaseBigQueryStorageSettings baseBigQueryStorageSettings = baseBigQueryStorageSettingsBuilder.build();
- * 
- * 
+ * BaseBigQueryStorageSettings baseBigQueryStorageSettings = + * baseBigQueryStorageSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BaseBigQueryStorageSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createReadSession. */ - public UnaryCallSettings createReadSessionSettings() { + public UnaryCallSettings + createReadSessionSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).createReadSessionSettings(); } /** Returns the object with the settings used for calls to readRows. */ - public ServerStreamingCallSettings readRowsSettings() { + public ServerStreamingCallSettings + readRowsSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).readRowsSettings(); } /** Returns the object with the settings used for calls to batchCreateReadSessionStreams. */ public UnaryCallSettings< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return ((BigQueryStorageStubSettings) getStubSettings()) .batchCreateReadSessionStreamsSettings(); } /** Returns the object with the settings used for calls to finalizeStream. */ - public UnaryCallSettings finalizeStreamSettings() { + public UnaryCallSettings finalizeStreamSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).finalizeStreamSettings(); } /** Returns the object with the settings used for calls to splitReadStream. */ - public UnaryCallSettings + public UnaryCallSettings splitReadStreamSettings() { return ((BigQueryStorageStubSettings) getStubSettings()).splitReadStreamSettings(); } @@ -165,18 +162,15 @@ protected BaseBigQueryStorageSettings(Builder settingsBuilder) throws IOExceptio /** Builder for BaseBigQueryStorageSettings. 
*/ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryStorageStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryStorageStubSettings.newBuilder()); - } - protected Builder(BaseBigQueryStorageSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -185,11 +179,15 @@ protected Builder(BigQueryStorageStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryStorageStubSettings.newBuilder()); + } + public BigQueryStorageStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryStorageStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -203,31 +201,34 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createReadSession. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createReadSessionSettings() { return getStubSettingsBuilder().createReadSessionSettings(); } /** Returns the builder for the settings used for calls to readRows. */ - public ServerStreamingCallSettings.Builder + public ServerStreamingCallSettings.Builder readRowsSettings() { return getStubSettingsBuilder().readRowsSettings(); } /** Returns the builder for the settings used for calls to batchCreateReadSessionStreams. 
*/ public UnaryCallSettings.Builder< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return getStubSettingsBuilder().batchCreateReadSessionStreamsSettings(); } /** Returns the builder for the settings used for calls to finalizeStream. */ - public UnaryCallSettings.Builder finalizeStreamSettings() { + public UnaryCallSettings.Builder + finalizeStreamSettings() { return getStubSettingsBuilder().finalizeStreamSettings(); } /** Returns the builder for the settings used for calls to splitReadStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder< + Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> splitReadStreamSettings() { return getStubSettingsBuilder().splitReadStreamSettings(); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java index 5c0d3b601e..eff4878eee 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,17 @@ */ /** - * A client to BigQuery Storage API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

========================= BaseBigQueryStorageClient ========================= + *

======================= BigQueryStorageClient ======================= * *

Service Description: BigQuery storage API. * *

The BigQuery storage API can be used to read data stored in BigQuery. * - *

Sample for BaseBigQueryStorageClient: - * - *

- * 
- * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
- *   TableReference tableReference = TableReference.newBuilder().build();
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   int requestedStreams = 0;
- *   ReadSession response = baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
- * }
- * 
- * 
+ *

Sample for BigQueryStorageClient: */ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.bigquery.storage.v1beta1; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java index 2d806771cb..d7f64bde10 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,53 +13,49 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage; import com.google.protobuf.Empty; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. + * Base stub class for the BigQueryStorage service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryStorageStub implements BackgroundResource { - public UnaryCallable createReadSessionCallable() { + public UnaryCallable + createReadSessionCallable() { throw new UnsupportedOperationException("Not implemented: createReadSessionCallable()"); } - public ServerStreamingCallable readRowsCallable() { + public ServerStreamingCallable + readRowsCallable() { throw new UnsupportedOperationException("Not implemented: readRowsCallable()"); } - public UnaryCallable + public UnaryCallable< + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable() { throw new UnsupportedOperationException( "Not implemented: batchCreateReadSessionStreamsCallable()"); } - public UnaryCallable finalizeStreamCallable() { + public UnaryCallable finalizeStreamCallable() { throw new UnsupportedOperationException("Not implemented: finalizeStreamCallable()"); } - public UnaryCallable splitReadStreamCallable() { + public UnaryCallable + splitReadStreamCallable() { throw new UnsupportedOperationException("Not implemented: splitReadStreamCallable()"); } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java index 1cf3ac0ccc..5225c25b98 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta1.stub; import com.google.api.core.ApiFunction; @@ -31,15 +32,7 @@ import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -50,7 +43,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryStorageStub}. * @@ -68,22 +61,23 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
- * BigQueryStorageStubSettings.Builder baseBigQueryStorageSettingsBuilder =
+ * 
{@code
+ * BigQueryStorageStubSettings.Builder bigQueryStorageSettingsBuilder =
  *     BigQueryStorageStubSettings.newBuilder();
- * baseBigQueryStorageSettingsBuilder
+ * bigQueryStorageSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryStorageSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         bigQueryStorageSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryStorageStubSettings baseBigQueryStorageSettings = baseBigQueryStorageSettingsBuilder.build();
- * 
- * 
+ * BigQueryStorageStubSettings bigQueryStorageSettings = bigQueryStorageSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryStorageStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = @@ -93,39 +87,45 @@ public class BigQueryStorageStubSettings extends StubSettings createReadSessionSettings; - private final ServerStreamingCallSettings readRowsSettings; + private final UnaryCallSettings + createReadSessionSettings; + private final ServerStreamingCallSettings + readRowsSettings; private final UnaryCallSettings< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings; - private final UnaryCallSettings finalizeStreamSettings; - private final UnaryCallSettings + private final UnaryCallSettings finalizeStreamSettings; + private final UnaryCallSettings splitReadStreamSettings; /** Returns the object with the settings used for calls to createReadSession. */ - public UnaryCallSettings createReadSessionSettings() { + public UnaryCallSettings + createReadSessionSettings() { return createReadSessionSettings; } /** Returns the object with the settings used for calls to readRows. */ - public ServerStreamingCallSettings readRowsSettings() { + public ServerStreamingCallSettings + readRowsSettings() { return readRowsSettings; } /** Returns the object with the settings used for calls to batchCreateReadSessionStreams. */ public UnaryCallSettings< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return batchCreateReadSessionStreamsSettings; } /** Returns the object with the settings used for calls to finalizeStream. 
*/ - public UnaryCallSettings finalizeStreamSettings() { + public UnaryCallSettings finalizeStreamSettings() { return finalizeStreamSettings; } /** Returns the object with the settings used for calls to splitReadStream. */ - public UnaryCallSettings + public UnaryCallSettings splitReadStreamSettings() { return splitReadStreamSettings; } @@ -136,10 +136,10 @@ public BigQueryStorageStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryStorageStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -210,18 +210,20 @@ protected BigQueryStorageStubSettings(Builder settingsBuilder) throws IOExceptio /** Builder for BigQueryStorageStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder createReadSessionSettings; - private final ServerStreamingCallSettings.Builder + private final ServerStreamingCallSettings.Builder< + Storage.ReadRowsRequest, Storage.ReadRowsResponse> readRowsSettings; private final UnaryCallSettings.Builder< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings; - private final UnaryCallSettings.Builder finalizeStreamSettings; - private final UnaryCallSettings.Builder + private final UnaryCallSettings.Builder + finalizeStreamSettings; + private final UnaryCallSettings.Builder< + Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> splitReadStreamSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -229,19 +231,18 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", + "retry_policy_0_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_3_codes", + "retry_policy_1_codes", + ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + definitions.put( + "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -260,7 +261,7 @@ public static class Builder extends StubSettings.Builder>of( + createReadSessionSettings, + 
batchCreateReadSessionStreamsSettings, + finalizeStreamSettings, + splitReadStreamSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .readRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .batchCreateReadSessionStreamsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder .finalizeStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); builder 
.splitReadStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); return builder; } - protected Builder(BigQueryStorageStubSettings settings) { - super(settings); - - createReadSessionSettings = settings.createReadSessionSettings.toBuilder(); - readRowsSettings = settings.readRowsSettings.toBuilder(); - batchCreateReadSessionStreamsSettings = - settings.batchCreateReadSessionStreamsSettings.toBuilder(); - finalizeStreamSettings = settings.finalizeStreamSettings.toBuilder(); - splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createReadSessionSettings, - batchCreateReadSessionStreamsSettings, - finalizeStreamSettings, - splitReadStreamSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * @@ -389,31 +384,34 @@ public Builder applyToAllUnaryMethods( } /** Returns the builder for the settings used for calls to createReadSession. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder createReadSessionSettings() { return createReadSessionSettings; } /** Returns the builder for the settings used for calls to readRows. */ - public ServerStreamingCallSettings.Builder + public ServerStreamingCallSettings.Builder readRowsSettings() { return readRowsSettings; } /** Returns the builder for the settings used for calls to batchCreateReadSessionStreams. 
*/ public UnaryCallSettings.Builder< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsSettings() { return batchCreateReadSessionStreamsSettings; } /** Returns the builder for the settings used for calls to finalizeStream. */ - public UnaryCallSettings.Builder finalizeStreamSettings() { + public UnaryCallSettings.Builder + finalizeStreamSettings() { return finalizeStreamSettings; } /** Returns the builder for the settings used for calls to splitReadStream. */ - public UnaryCallSettings.Builder + public UnaryCallSettings.Builder< + Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> splitReadStreamSettings() { return splitReadStreamSettings; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java index 16a6b42c5f..4cf9880b97 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryStorage service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryStorageCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java index 4ffa5f6309..204b0bb462 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/GrpcBigQueryStorageStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -24,16 +24,9 @@ import com.google.api.gax.rpc.RequestParamsExtractor; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; +import com.google.cloud.bigquery.storage.v1beta1.Storage; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; @@ -42,81 +35,95 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryStorage service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryStorageStub extends BigQueryStorageStub { - - private static final MethodDescriptor + private static final MethodDescriptor createReadSessionMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/CreateReadSession") .setRequestMarshaller( - ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) + ProtoUtils.marshaller(Storage.CreateReadSessionRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(Storage.ReadSession.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor readRowsMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.SERVER_STREAMING) .setFullMethodName("google.cloud.bigquery.storage.v1beta1.BigQueryStorage/ReadRows") - .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) - .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) + .setRequestMarshaller( + ProtoUtils.marshaller(Storage.ReadRowsRequest.getDefaultInstance())) + .setResponseMarshaller( + ProtoUtils.marshaller(Storage.ReadRowsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsMethodDescriptor = MethodDescriptor - . + . 
newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/BatchCreateReadSessionStreams") .setRequestMarshaller( - ProtoUtils.marshaller(BatchCreateReadSessionStreamsRequest.getDefaultInstance())) + ProtoUtils.marshaller( + Storage.BatchCreateReadSessionStreamsRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(BatchCreateReadSessionStreamsResponse.getDefaultInstance())) + ProtoUtils.marshaller( + Storage.BatchCreateReadSessionStreamsResponse.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor finalizeStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor.newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/FinalizeStream") .setRequestMarshaller( - ProtoUtils.marshaller(FinalizeStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(Storage.FinalizeStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance())) .build(); - private static final MethodDescriptor + + private static final MethodDescriptor< + Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse> splitReadStreamMethodDescriptor = - MethodDescriptor.newBuilder() + MethodDescriptor + .newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.bigquery.storage.v1beta1.BigQueryStorage/SplitReadStream") .setRequestMarshaller( - ProtoUtils.marshaller(SplitReadStreamRequest.getDefaultInstance())) + ProtoUtils.marshaller(Storage.SplitReadStreamRequest.getDefaultInstance())) .setResponseMarshaller( - ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) + ProtoUtils.marshaller(Storage.SplitReadStreamResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - - private final UnaryCallable 
createReadSessionCallable; - private final ServerStreamingCallable readRowsCallable; + private final UnaryCallable + createReadSessionCallable; + private final ServerStreamingCallable + readRowsCallable; private final UnaryCallable< - BatchCreateReadSessionStreamsRequest, BatchCreateReadSessionStreamsResponse> + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable; - private final UnaryCallable finalizeStreamCallable; - private final UnaryCallable + private final UnaryCallable finalizeStreamCallable; + private final UnaryCallable splitReadStreamCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryStorageStub create(BigQueryStorageStubSettings settings) @@ -157,32 +164,34 @@ protected GrpcBigQueryStorageStub( GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); - GrpcCallSettings createReadSessionTransportSettings = - GrpcCallSettings.newBuilder() - .setMethodDescriptor(createReadSessionMethodDescriptor) - .setParamsExtractor( - new RequestParamsExtractor() { - @Override - public Map extract(CreateReadSessionRequest request) { - ImmutableMap.Builder params = ImmutableMap.builder(); - params.put( - "table_reference.project_id", - String.valueOf(request.getTableReference().getProjectId())); - params.put( - "table_reference.dataset_id", - String.valueOf(request.getTableReference().getDatasetId())); - return params.build(); - } - }) - .build(); - GrpcCallSettings readRowsTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings + createReadSessionTransportSettings = + GrpcCallSettings.newBuilder() + .setMethodDescriptor(createReadSessionMethodDescriptor) + .setParamsExtractor( + new 
RequestParamsExtractor() { + @Override + public Map extract(Storage.CreateReadSessionRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put( + "table_reference.dataset_id", + String.valueOf(request.getTableReference().getDatasetId())); + params.put( + "table_reference.project_id", + String.valueOf(request.getTableReference().getProjectId())); + return params.build(); + } + }) + .build(); + GrpcCallSettings readRowsTransportSettings = + GrpcCallSettings.newBuilder() .setMethodDescriptor(readRowsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(ReadRowsRequest request) { + public Map extract(Storage.ReadRowsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put( "read_position.stream.name", @@ -191,44 +200,48 @@ public Map extract(ReadRowsRequest request) { } }) .build(); - GrpcCallSettings + GrpcCallSettings< + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsTransportSettings = GrpcCallSettings - . + . 
newBuilder() .setMethodDescriptor(batchCreateReadSessionStreamsMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override public Map extract( - BatchCreateReadSessionStreamsRequest request) { + Storage.BatchCreateReadSessionStreamsRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("session.name", String.valueOf(request.getSession().getName())); return params.build(); } }) .build(); - GrpcCallSettings finalizeStreamTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings finalizeStreamTransportSettings = + GrpcCallSettings.newBuilder() .setMethodDescriptor(finalizeStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(FinalizeStreamRequest request) { + public Map extract(Storage.FinalizeStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put("stream.name", String.valueOf(request.getStream().getName())); return params.build(); } }) .build(); - GrpcCallSettings + GrpcCallSettings splitReadStreamTransportSettings = - GrpcCallSettings.newBuilder() + GrpcCallSettings + .newBuilder() .setMethodDescriptor(splitReadStreamMethodDescriptor) .setParamsExtractor( - new RequestParamsExtractor() { + new RequestParamsExtractor() { @Override - public Map extract(SplitReadStreamRequest request) { + public Map extract(Storage.SplitReadStreamRequest request) { ImmutableMap.Builder params = ImmutableMap.builder(); params.put( "original_stream.name", @@ -258,27 +271,37 @@ public Map extract(SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public 
GrpcOperationsStub getOperationsStub() { + return operationsStub; } - public UnaryCallable createReadSessionCallable() { + public UnaryCallable + createReadSessionCallable() { return createReadSessionCallable; } - public ServerStreamingCallable readRowsCallable() { + public ServerStreamingCallable + readRowsCallable() { return readRowsCallable; } - public UnaryCallable + public UnaryCallable< + Storage.BatchCreateReadSessionStreamsRequest, + Storage.BatchCreateReadSessionStreamsResponse> batchCreateReadSessionStreamsCallable() { return batchCreateReadSessionStreamsCallable; } - public UnaryCallable finalizeStreamCallable() { + public UnaryCallable finalizeStreamCallable() { return finalizeStreamCallable; } - public UnaryCallable splitReadStreamCallable() { + public UnaryCallable + splitReadStreamCallable() { return splitReadStreamCallable; } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java index 12ac3ce6ca..0ab42e6b6e 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -25,7 +26,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery Read API. * @@ -37,18 +38,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   ReadSession readSession = ReadSession.newBuilder().build();
- *   int maxStreamCount = 0;
- *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the baseBigQueryReadClient object to clean up resources + *

Note: close() needs to be called on the BaseBigQueryReadClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * @@ -77,30 +67,28 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * 
- * 
+ * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings baseBigQueryReadSettings =
  *     BaseBigQueryReadSettings.newBuilder().setEndpoint(myEndpoint).build();
  * BaseBigQueryReadClient baseBigQueryReadClient =
  *     BaseBigQueryReadClient.create(baseBigQueryReadSettings);
- * 
- * 
+ * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BaseBigQueryReadClient implements BackgroundResource { private final BaseBigQueryReadSettings settings; private final BigQueryReadStub stub; @@ -121,7 +109,7 @@ public static final BaseBigQueryReadClient create(BaseBigQueryReadSettings setti /** * Constructs an instance of BaseBigQueryReadClient, using the given stub for making calls. This - * is for advanced usage - prefer to use BaseBigQueryReadSettings}. + * is for advanced usage - prefer using create(BaseBigQueryReadSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BaseBigQueryReadClient create(BigQueryReadStub stub) { @@ -153,7 +141,7 @@ public BigQueryReadStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -172,17 +160,6 @@ public BigQueryReadStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   int maxStreamCount = 0;
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
-   * }
-   * 
- * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. * @param readSession Required. Session to be created. @@ -205,7 +182,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -224,17 +201,6 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   int maxStreamCount = 0;
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(parent.toString(), readSession, maxStreamCount);
-   * }
-   * 
- * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. * @param readSession Required. Session to be created. @@ -257,7 +223,7 @@ public final ReadSession createReadSession( return createReadSession(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -276,20 +242,6 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setReadSession(readSession)
-   *     .build();
-   *   ReadSession response = baseBigQueryReadClient.createReadSession(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -297,7 +249,7 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { return createReadSessionCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a new read session. A read session divides the contents of a BigQuery table into one or * more streams, which can then be used to read data from the table. The read session also @@ -317,26 +269,12 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ProjectName parent = ProjectName.of("[PROJECT]");
-   *   ReadSession readSession = ReadSession.newBuilder().build();
-   *   CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setReadSession(readSession)
-   *     .build();
-   *   ApiFuture<ReadSession> future = baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
-   *   // Do something
-   *   ReadSession response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Reads rows from the stream in the format prescribed by the ReadSession. Each response contains * one or more table rows, up to a maximum of 100 MiB per response; read requests which attempt to @@ -346,26 +284,12 @@ public final UnaryCallable createReadSess * stream. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName readStream = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   ReadRowsRequest request = ReadRowsRequest.newBuilder()
-   *     .setReadStream(readStream.toString())
-   *     .build();
-   *
-   *   ServerStream<ReadRowsResponse> stream = baseBigQueryReadClient.readRowsCallable().call(request);
-   *   for (ReadRowsResponse response : stream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -378,18 +302,6 @@ public final ServerStreamingCallable readRows * original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read * to completion. * - *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -397,7 +309,7 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ return splitReadStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Splits a given `ReadStream` into two `ReadStream` objects. These `ReadStream` objects are * referred to as the primary and the residual streams of the split. The original `ReadStream` can @@ -411,18 +323,6 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ * to completion. * *

Sample code: - * - *


-   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
-   *   ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
-   *   SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<SplitReadStreamResponse> future = baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
-   *   // Do something
-   *   SplitReadStreamResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable splitReadStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java index 6570a55fc8..464224d2d1 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.ApiFunction; @@ -31,7 +32,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BaseBigQueryReadClient}. * @@ -49,23 +50,24 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
+ * 
{@code
  * BaseBigQueryReadSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BaseBigQueryReadSettings.newBuilder();
  * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         baseBigQueryReadSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BaseBigQueryReadSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BaseBigQueryReadSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createReadSession. */ public UnaryCallSettings createReadSessionSettings() { return ((BigQueryReadStubSettings) getStubSettings()).createReadSessionSettings(); @@ -142,18 +144,15 @@ protected BaseBigQueryReadSettings(Builder settingsBuilder) throws IOException { /** Builder for BaseBigQueryReadSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryReadStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryReadStubSettings.newBuilder()); - } - protected Builder(BaseBigQueryReadSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -162,11 +161,15 @@ protected Builder(BigQueryReadStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryReadStubSettings.newBuilder()); + } + public BigQueryReadStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryReadStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java index e902043f97..9fb7d464f5 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -25,7 +26,7 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND SERVICE +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: BigQuery Write API. * @@ -34,17 +35,7 @@ *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * - *

- * 
- * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- *   WriteStream writeStream = WriteStream.newBuilder().build();
- *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
- * }
- * 
- * 
- * - *

Note: close() needs to be called on the bigQueryWriteClient object to clean up resources such + *

Note: close() needs to be called on the BigQueryWriteClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * *

The surface of this class includes several types of Java methods for each of the API's @@ -72,30 +63,26 @@ * *

To customize credentials: * - *

- * 
+ * 
{@code
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder()
  *         .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
  *         .build();
- * BigQueryWriteClient bigQueryWriteClient =
- *     BigQueryWriteClient.create(bigQueryWriteSettings);
- * 
- * 
+ * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings); + * }
* - * To customize the endpoint: + *

To customize the endpoint: * - *

- * 
+ * 
{@code
  * BigQueryWriteSettings bigQueryWriteSettings =
  *     BigQueryWriteSettings.newBuilder().setEndpoint(myEndpoint).build();
- * BigQueryWriteClient bigQueryWriteClient =
- *     BigQueryWriteClient.create(bigQueryWriteSettings);
- * 
- * 
+ * BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create(bigQueryWriteSettings); + * }
+ * + *

Please refer to the GitHub repository's samples for more quickstart code snippets. */ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator") public class BigQueryWriteClient implements BackgroundResource { private final BigQueryWriteSettings settings; private final BigQueryWriteStub stub; @@ -116,7 +103,7 @@ public static final BigQueryWriteClient create(BigQueryWriteSettings settings) /** * Constructs an instance of BigQueryWriteClient, using the given stub for making calls. This is - * for advanced usage - prefer to use BigQueryWriteSettings}. + * for advanced usage - prefer using create(BigQueryWriteSettings). */ @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public static final BigQueryWriteClient create(BigQueryWriteStub stub) { @@ -148,7 +135,7 @@ public BigQueryWriteStub getStub() { return stub; } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -156,16 +143,6 @@ public BigQueryWriteStub getStub() { * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
-   * }
-   * 
- * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @param writeStream Required. Stream to be created. @@ -180,7 +157,7 @@ public final WriteStream createWriteStream(TableName parent, WriteStream writeSt return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -188,16 +165,6 @@ public final WriteStream createWriteStream(TableName parent, WriteStream writeSt * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent.toString(), writeStream);
-   * }
-   * 
- * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @param writeStream Required. Stream to be created. @@ -209,7 +176,7 @@ public final WriteStream createWriteStream(String parent, WriteStream writeStrea return createWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -217,20 +184,6 @@ public final WriteStream createWriteStream(String parent, WriteStream writeStrea * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setWriteStream(writeStream)
-   *     .build();
-   *   WriteStream response = bigQueryWriteClient.createWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -238,7 +191,7 @@ public final WriteStream createWriteStream(CreateWriteStreamRequest request) { return createWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Creates a write stream to the given table. Additionally, every table has a special COMMITTED * stream named '_default' to which data can be written. This stream doesn't need to be created @@ -247,26 +200,12 @@ public final WriteStream createWriteStream(CreateWriteStreamRequest request) { * received. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
-   *   WriteStream writeStream = WriteStream.newBuilder().build();
-   *   CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
-   *     .setParent(parent.toString())
-   *     .setWriteStream(writeStream)
-   *     .build();
-   *   ApiFuture<WriteStream> future = bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   WriteStream response = future.get();
-   * }
-   * 
*/ public final UnaryCallable createWriteStreamCallable() { return stub.createWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Appends data to the given stream. * @@ -286,40 +225,15 @@ public final UnaryCallable createWriteStr * the stream is committed. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
-   *       bigQueryWriteClient.appendRowsCallable().call();
-   *
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   AppendRowsRequest request = AppendRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   bidiStream.send(request);
-   *   for (AppendRowsResponse response : bidiStream) {
-   *     // Do something when receive a response
-   *   }
-   * }
-   * 
*/ public final BidiStreamingCallable appendRowsCallable() { return stub.appendRowsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(name);
-   * }
-   * 
- * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -330,19 +244,10 @@ public final WriteStream getWriteStream(WriteStreamName name) { return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(name.toString());
-   * }
-   * 
- * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -352,22 +257,10 @@ public final WriteStream getWriteStream(String name) { return getWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   WriteStream response = bigQueryWriteClient.getWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -375,42 +268,21 @@ public final WriteStream getWriteStream(GetWriteStreamRequest request) { return getWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a write stream. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<WriteStream> future = bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   WriteStream response = future.get();
-   * }
-   * 
*/ public final UnaryCallable getWriteStreamCallable() { return stub.getWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
-   * }
-   * 
- * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -423,20 +295,11 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName nam return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name.toString());
-   * }
-   * 
- * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -447,23 +310,11 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(String name) { return finalizeWriteStream(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -471,45 +322,24 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStream return finalizeWriteStreamCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finalize a write stream so that no new data can be appended to the stream. Finalize is not * supported on the '_default' stream. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
-   *     .setName(name.toString())
-   *     .build();
-   *   ApiFuture<FinalizeWriteStreamResponse> future = bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
-   *   // Do something
-   *   FinalizeWriteStreamResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable finalizeWriteStreamCallable() { return stub.finalizeWriteStreamCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   String parent = "";
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent);
-   * }
-   * 
- * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -520,26 +350,12 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(String pare return batchCommitWriteStreams(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   String parent = "";
-   *   List<String> writeStreams = new ArrayList<>();
-   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
-   *     .setParent(parent)
-   *     .addAllWriteStreams(writeStreams)
-   *     .build();
-   *   BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -548,49 +364,26 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams( return batchCommitWriteStreamsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   String parent = "";
-   *   List<String> writeStreams = new ArrayList<>();
-   *   BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
-   *     .setParent(parent)
-   *     .addAllWriteStreams(writeStreams)
-   *     .build();
-   *   ApiFuture<BatchCommitWriteStreamsResponse> future = bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
-   *   // Do something
-   *   BatchCommitWriteStreamsResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable batchCommitWriteStreamsCallable() { return stub.batchCommitWriteStreamsCallable(); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
-   * }
-   * 
- * * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -602,22 +395,13 @@ public final FlushRowsResponse flushRows(WriteStreamName writeStream) { return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream.toString());
-   * }
-   * 
- * * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -626,25 +410,13 @@ public final FlushRowsResponse flushRows(String writeStream) { return flushRows(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * - *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(request);
-   * }
-   * 
- * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -652,7 +424,7 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) { return flushRowsCallable().call(request); } - // AUTO-GENERATED DOCUMENTATION AND METHOD + // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Flushes rows to a BUFFERED stream. If users are appending rows to BUFFERED stream, flush * operation is required in order for the rows to become available for reading. A Flush operation @@ -660,18 +432,6 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) { * the request. Flush is not supported on the _default stream, since it is not BUFFERED. * *

Sample code: - * - *


-   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
-   *   WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
-   *   FlushRowsRequest request = FlushRowsRequest.newBuilder()
-   *     .setWriteStream(writeStream.toString())
-   *     .build();
-   *   ApiFuture<FlushRowsResponse> future = bigQueryWriteClient.flushRowsCallable().futureCall(request);
-   *   // Do something
-   *   FlushRowsResponse response = future.get();
-   * }
-   * 
*/ public final UnaryCallable flushRowsCallable() { return stub.flushRowsCallable(); diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java index 154534dbdc..3ec1da642e 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.ApiFunction; @@ -31,7 +32,7 @@ import java.util.List; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryWriteClient}. * @@ -49,23 +50,23 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

- * 
- * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder =
- *     BigQueryWriteSettings.newBuilder();
+ * 
{@code
+ * BigQueryWriteSettings.Builder bigQueryWriteSettingsBuilder = BigQueryWriteSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
+ *         bigQueryWriteSettingsBuilder
+ *             .createWriteStreamSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") -@BetaApi +@Generated("by gapic-generator-java") public class BigQueryWriteSettings extends ClientSettings { + /** Returns the object with the settings used for calls to createWriteStream. */ public UnaryCallSettings createWriteStreamSettings() { return ((BigQueryWriteStubSettings) getStubSettings()).createWriteStreamSettings(); @@ -158,18 +159,15 @@ protected BigQueryWriteSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryWriteSettings. */ public static class Builder extends ClientSettings.Builder { + protected Builder() throws IOException { - this((ClientContext) null); + this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(BigQueryWriteStubSettings.newBuilder(clientContext)); } - private static Builder createDefault() { - return new Builder(BigQueryWriteStubSettings.newBuilder()); - } - protected Builder(BigQueryWriteSettings settings) { super(settings.getStubSettings().toBuilder()); } @@ -178,11 +176,15 @@ protected Builder(BigQueryWriteStubSettings.Builder stubSettings) { super(stubSettings); } + private static Builder createDefault() { + return new Builder(BigQueryWriteStubSettings.newBuilder()); + } + public BigQueryWriteStubSettings.Builder getStubSettingsBuilder() { return ((BigQueryWriteStubSettings.Builder) getStubSettings()); } - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. 
* diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java index b53d96890d..da92a24b0c 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,9 @@ */ /** - * A client to BigQuery Storage API. + * The interfaces provided are listed below, along with usage samples. * - *

The interfaces provided are listed below, along with usage samples. - * - *

====================== BaseBigQueryReadClient ====================== + *

======================= BigQueryReadClient ======================= * *

Service Description: BigQuery Read API. * @@ -28,38 +26,17 @@ *

New code should use the v1 Read API going forward, if they don't use Write API at the same * time. * - *

Sample for BaseBigQueryReadClient: - * - *

- * 
- * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- *   ProjectName parent = ProjectName.of("[PROJECT]");
- *   ReadSession readSession = ReadSession.newBuilder().build();
- *   int maxStreamCount = 0;
- *   ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
- * }
- * 
- * 
+ *

Sample for BigQueryReadClient: * - * =================== BigQueryWriteClient =================== + *

======================= BigQueryWriteClient ======================= * *

Service Description: BigQuery Write API. * *

The Write API can be used to write data to BigQuery. * *

Sample for BigQueryWriteClient: - * - *

- * 
- * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- *   WriteStream writeStream = WriteStream.newBuilder().build();
- *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
- * }
- * 
- * 
*/ -@Generated("by gapic-generator") +@Generated("by gapic-generator-java") package com.google.cloud.bigquery.storage.v1beta2; import javax.annotation.Generated; diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java index 116f00c54f..ee9bfd6aba 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -27,14 +27,13 @@ import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. + * Base stub class for the BigQueryRead service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryReadStub implements BackgroundResource { public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java index 1e9f940a35..bb79df916a 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; import com.google.api.core.ApiFunction; @@ -46,7 +47,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryReadStub}. * @@ -64,28 +65,28 @@ * *

For example, to set the total timeout of createReadSession to 30 seconds: * - *

- * 
- * BigQueryReadStubSettings.Builder baseBigQueryReadSettingsBuilder =
+ * 
{@code
+ * BigQueryReadStubSettings.Builder bigQueryReadSettingsBuilder =
  *     BigQueryReadStubSettings.newBuilder();
- * baseBigQueryReadSettingsBuilder
+ * bigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         baseBigQueryReadSettingsBuilder.createReadSessionSettings().getRetrySettings().toBuilder()
+ *         bigQueryReadSettingsBuilder
+ *             .createReadSessionSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryReadStubSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
- * 
- * 
+ * BigQueryReadStubSettings bigQueryReadSettings = bigQueryReadSettingsBuilder.build(); + * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryReadStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = ImmutableList.builder() .add("https://www.googleapis.com/auth/bigquery") - .add("https://www.googleapis.com/auth/bigquery.insertdata") .add("https://www.googleapis.com/auth/bigquery.readonly") .add("https://www.googleapis.com/auth/cloud-platform") .build(); @@ -117,10 +118,10 @@ public BigQueryReadStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryReadStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -188,14 +189,12 @@ protected BigQueryReadStubSettings(Builder settingsBuilder) throws IOException { /** Builder for BigQueryReadStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createReadSessionSettings; private final ServerStreamingCallSettings.Builder readRowsSettings; private final UnaryCallSettings.Builder splitReadStreamSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -203,36 +202,18 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_4_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, - StatusCode.Code.UNAVAILABLE, - StatusCode.Code.RESOURCE_EXHAUSTED))); - definitions.put( - "retry_policy_6_codes", + "retry_policy_0_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); definitions.put( - "retry_policy_3_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", + "retry_policy_1_codes", ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( - "retry_policy_5_codes", + "retry_policy_2_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -251,29 +232,7 @@ public static class Builder extends StubSettings.Builder>of( createReadSessionSettings, splitReadStreamSettings); - initDefaults(this); } + protected Builder(BigQueryReadStubSettings settings) { + super(settings); + + createReadSessionSettings = 
settings.createReadSessionSettings.toBuilder(); + readRowsSettings = settings.readRowsSettings.toBuilder(); + splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); + + unaryMethodSettingsBuilders = + ImmutableList.>of( + createReadSessionSettings, splitReadStreamSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createReadSessionSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .readRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .splitReadStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params")); return builder; } - protected Builder(BigQueryReadStubSettings settings) { - super(settings); - - createReadSessionSettings = 
settings.createReadSessionSettings.toBuilder(); - readRowsSettings = settings.readRowsSettings.toBuilder(); - splitReadStreamSettings = settings.splitReadStreamSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createReadSessionSettings, splitReadStreamSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java index cc569eeb24..cedc3d4d33 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; @@ -32,14 +32,13 @@ import com.google.cloud.bigquery.storage.v1beta2.WriteStream; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * Base stub class for BigQuery Storage API. 
+ * Base stub class for the BigQueryWrite service API. * *

This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") public abstract class BigQueryWriteStub implements BackgroundResource { public UnaryCallable createWriteStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java index 4e945907d9..ebe08eda20 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2.stub; import com.google.api.core.ApiFunction; @@ -51,7 +52,7 @@ import javax.annotation.Generated; import org.threeten.bp.Duration; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BigQueryWriteStub}. * @@ -69,29 +70,29 @@ * *

For example, to set the total timeout of createWriteStream to 30 seconds: * - *

- * 
+ * 
{@code
  * BigQueryWriteStubSettings.Builder bigQueryWriteSettingsBuilder =
  *     BigQueryWriteStubSettings.newBuilder();
  * bigQueryWriteSettingsBuilder
  *     .createWriteStreamSettings()
  *     .setRetrySettings(
- *         bigQueryWriteSettingsBuilder.createWriteStreamSettings().getRetrySettings().toBuilder()
+ *         bigQueryWriteSettingsBuilder
+ *             .createWriteStreamSettings()
+ *             .getRetrySettings()
+ *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
  * BigQueryWriteStubSettings bigQueryWriteSettings = bigQueryWriteSettingsBuilder.build();
- * 
- * 
+ * }
*/ -@Generated("by gapic-generator") @BetaApi +@Generated("by gapic-generator-java") public class BigQueryWriteStubSettings extends StubSettings { /** The default scopes of the service. */ private static final ImmutableList DEFAULT_SERVICE_SCOPES = ImmutableList.builder() .add("https://www.googleapis.com/auth/bigquery") .add("https://www.googleapis.com/auth/bigquery.insertdata") - .add("https://www.googleapis.com/auth/bigquery.readonly") .add("https://www.googleapis.com/auth/cloud-platform") .build(); @@ -142,10 +143,10 @@ public BigQueryWriteStub createStub() throws IOException { .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBigQueryWriteStub.create(this); - } else { - throw new UnsupportedOperationException( - "Transport not supported: " + getTransportChannelProvider().getTransportName()); } + throw new UnsupportedOperationException( + String.format( + "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ @@ -216,7 +217,6 @@ protected BigQueryWriteStubSettings(Builder settingsBuilder) throws IOException /** Builder for BigQueryWriteStubSettings. 
*/ public static class Builder extends StubSettings.Builder { private final ImmutableList> unaryMethodSettingsBuilders; - private final UnaryCallSettings.Builder createWriteStreamSettings; private final StreamingCallSettings.Builder @@ -229,7 +229,6 @@ public static class Builder extends StubSettings.Builder batchCommitWriteStreamsSettings; private final UnaryCallSettings.Builder flushRowsSettings; - private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; @@ -237,36 +236,22 @@ public static class Builder extends StubSettings.Builder> definitions = ImmutableMap.builder(); definitions.put( - "retry_policy_1_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_4_codes", + "retry_policy_3_codes", ImmutableSet.copyOf( Lists.newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); definitions.put( - "retry_policy_6_codes", - ImmutableSet.copyOf( - Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.newArrayList())); - definitions.put( - "retry_policy_3_codes", + "retry_policy_4_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); - definitions.put( - "retry_policy_2_codes", - ImmutableSet.copyOf(Lists.newArrayList(StatusCode.Code.UNAVAILABLE))); + StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); definitions.put( "retry_policy_5_codes", ImmutableSet.copyOf( Lists.newArrayList( - StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED))); + StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } @@ -275,28 +260,6 @@ public static class Builder extends StubSettings.Builder definitions = ImmutableMap.builder(); RetrySettings settings = null; - settings = - 
RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_1_params", settings); - settings = - RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100L)) - .setRetryDelayMultiplier(1.3) - .setMaxRetryDelay(Duration.ofMillis(60000L)) - .setInitialRpcTimeout(Duration.ofMillis(600000L)) - .setRpcTimeoutMultiplier(1.0) - .setMaxRpcTimeout(Duration.ofMillis(600000L)) - .setTotalTimeout(Duration.ofMillis(600000L)) - .build(); - definitions.put("retry_policy_6_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelay(Duration.ofMillis(100L)) @@ -318,7 +281,7 @@ public static class Builder extends StubSettings.Builder>of( + createWriteStreamSettings, + getWriteStreamSettings, + finalizeWriteStreamSettings, + batchCommitWriteStreamsSettings, + flushRowsSettings); + } + private static Builder createDefault() { - Builder builder = new Builder((ClientContext) null); + Builder builder = new Builder(((ClientContext) null)); + builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setEndpoint(getDefaultEndpoint()); + return initDefaults(builder); } private static Builder initDefaults(Builder builder) { - builder .createWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_4_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_4_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); 
builder .getWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .finalizeWriteStreamSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .batchCommitWriteStreamsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .flushRowsSettings() - .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_6_codes")) - .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_6_params")); + .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) + .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); return builder; } - protected Builder(BigQueryWriteStubSettings settings) { - super(settings); - - createWriteStreamSettings = settings.createWriteStreamSettings.toBuilder(); - appendRowsSettings = settings.appendRowsSettings.toBuilder(); - getWriteStreamSettings = settings.getWriteStreamSettings.toBuilder(); - finalizeWriteStreamSettings = settings.finalizeWriteStreamSettings.toBuilder(); - batchCommitWriteStreamsSettings = settings.batchCommitWriteStreamsSettings.toBuilder(); - flushRowsSettings = settings.flushRowsSettings.toBuilder(); - - unaryMethodSettingsBuilders = - ImmutableList.>of( - createWriteStreamSettings, - 
getWriteStreamSettings, - finalizeWriteStreamSettings, - batchCommitWriteStreamsSettings, - flushRowsSettings); - } - - // NEXT_MAJOR_VER: remove 'throws Exception' + // NEXT_MAJOR_VER: remove 'throws Exception'. /** * Applies the given settings updater function to all of the unary API methods in this service. * diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java index a66a898f09..85844b1ed9 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryRead service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryReadCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java index 13f589f0af..fdfd95565d 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryReadStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -31,6 +31,7 @@ import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamRequest; import com.google.cloud.bigquery.storage.v1beta2.SplitReadStreamResponse; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -38,16 +39,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryRead service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryReadStub extends BigQueryReadStub { - private static final MethodDescriptor createReadSessionMethodDescriptor = MethodDescriptor.newBuilder() @@ -58,6 +57,7 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(CreateReadSessionRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadSession.getDefaultInstance())) .build(); + private static final MethodDescriptor readRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -66,6 +66,7 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { .setRequestMarshaller(ProtoUtils.marshaller(ReadRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(ReadRowsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor splitReadStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -78,13 +79,13 @@ public class GrpcBigQueryReadStub extends BigQueryReadStub { ProtoUtils.marshaller(SplitReadStreamResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final UnaryCallable createReadSessionCallable; private final ServerStreamingCallable readRowsCallable; private final UnaryCallable splitReadStreamCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryReadStub create(BigQueryReadStubSettings settings) @@ -123,6 +124,7 @@ protected GrpcBigQueryReadStub( GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings 
createReadSessionTransportSettings = GrpcCallSettings.newBuilder() @@ -178,7 +180,12 @@ public Map extract(SplitReadStreamRequest request) { callableFactory.createUnaryCallable( splitReadStreamTransportSettings, settings.splitReadStreamSettings(), clientContext); - backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } public UnaryCallable createReadSessionCallable() { diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java index 985997ff97..0831c1c84e 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteCallableFactory.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcCallableFactory; import com.google.api.gax.grpc.GrpcStubCallableFactory; @@ -31,18 +31,19 @@ import com.google.api.gax.rpc.StreamingCallSettings; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; +import com.google.longrunning.Operation; import com.google.longrunning.stub.OperationsStub; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC callable factory implementation for BigQuery Storage API. + * gRPC callable factory implementation for the BigQueryWrite service API. * *

This class is for advanced usage. */ @Generated("by gapic-generator") -@BetaApi("The surface for use by generated code is not stable yet and may change in the future.") public class GrpcBigQueryWriteCallableFactory implements GrpcStubCallableFactory { + @Override public UnaryCallable createUnaryCallable( GrpcCallSettings grpcCallSettings, @@ -55,61 +56,58 @@ public UnaryCallable createUnaryCalla public UnaryCallable createPagedCallable( GrpcCallSettings grpcCallSettings, - PagedCallSettings pagedCallSettings, + PagedCallSettings callSettings, ClientContext clientContext) { - return GrpcCallableFactory.createPagedCallable( - grpcCallSettings, pagedCallSettings, clientContext); + return GrpcCallableFactory.createPagedCallable(grpcCallSettings, callSettings, clientContext); } @Override public UnaryCallable createBatchingCallable( GrpcCallSettings grpcCallSettings, - BatchingCallSettings batchingCallSettings, + BatchingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createBatchingCallable( - grpcCallSettings, batchingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } - @BetaApi( - "The surface for long-running operations is not stable yet and may change in the future.") @Override public OperationCallable createOperationCallable( - GrpcCallSettings grpcCallSettings, - OperationCallSettings operationCallSettings, + GrpcCallSettings grpcCallSettings, + OperationCallSettings callSettings, ClientContext clientContext, OperationsStub operationsStub) { return GrpcCallableFactory.createOperationCallable( - grpcCallSettings, operationCallSettings, clientContext, operationsStub); + grpcCallSettings, callSettings, clientContext, operationsStub); } @Override public BidiStreamingCallable createBidiStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return 
GrpcCallableFactory.createBidiStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ServerStreamingCallable createServerStreamingCallable( GrpcCallSettings grpcCallSettings, - ServerStreamingCallSettings streamingCallSettings, + ServerStreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createServerStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } @Override public ClientStreamingCallable createClientStreamingCallable( GrpcCallSettings grpcCallSettings, - StreamingCallSettings streamingCallSettings, + StreamingCallSettings callSettings, ClientContext clientContext) { return GrpcCallableFactory.createClientStreamingCallable( - grpcCallSettings, streamingCallSettings, clientContext); + grpcCallSettings, callSettings, clientContext); } } diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java index 262b7557f9..cd44c22ae6 100644 --- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java +++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/GrpcBigQueryWriteStub.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2.stub; -import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; @@ -36,6 +36,7 @@ import com.google.cloud.bigquery.storage.v1beta2.GetWriteStreamRequest; import com.google.cloud.bigquery.storage.v1beta2.WriteStream; import com.google.common.collect.ImmutableMap; +import com.google.longrunning.stub.GrpcOperationsStub; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; @@ -43,16 +44,14 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Generated; -// AUTO-GENERATED DOCUMENTATION AND CLASS +// AUTO-GENERATED DOCUMENTATION AND CLASS. /** - * gRPC stub implementation for BigQuery Storage API. + * gRPC stub implementation for the BigQueryWrite service API. * *

This class is for advanced usage and reflects the underlying API directly. */ -@Generated("by gapic-generator") -@BetaApi("A restructuring of stub classes is planned, so this may break in the future") +@Generated("by gapic-generator-java") public class GrpcBigQueryWriteStub extends BigQueryWriteStub { - private static final MethodDescriptor createWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -63,6 +62,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { ProtoUtils.marshaller(CreateWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) .build(); + private static final MethodDescriptor appendRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -71,6 +71,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setRequestMarshaller(ProtoUtils.marshaller(AppendRowsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AppendRowsResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor getWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -81,6 +82,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { ProtoUtils.marshaller(GetWriteStreamRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(WriteStream.getDefaultInstance())) .build(); + private static final MethodDescriptor finalizeWriteStreamMethodDescriptor = MethodDescriptor.newBuilder() @@ -92,6 +94,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller( ProtoUtils.marshaller(FinalizeWriteStreamResponse.getDefaultInstance())) .build(); + private static final MethodDescriptor< BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse> batchCommitWriteStreamsMethodDescriptor = @@ -105,6 +108,7 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller( ProtoUtils.marshaller(BatchCommitWriteStreamsResponse.getDefaultInstance())) .build(); + 
private static final MethodDescriptor flushRowsMethodDescriptor = MethodDescriptor.newBuilder() @@ -114,8 +118,6 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { .setResponseMarshaller(ProtoUtils.marshaller(FlushRowsResponse.getDefaultInstance())) .build(); - private final BackgroundResource backgroundResources; - private final UnaryCallable createWriteStreamCallable; private final BidiStreamingCallable appendRowsCallable; private final UnaryCallable getWriteStreamCallable; @@ -125,6 +127,8 @@ public class GrpcBigQueryWriteStub extends BigQueryWriteStub { batchCommitWriteStreamsCallable; private final UnaryCallable flushRowsCallable; + private final BackgroundResource backgroundResources; + private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcBigQueryWriteStub create(BigQueryWriteStubSettings settings) @@ -163,6 +167,7 @@ protected GrpcBigQueryWriteStub( GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; + this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings createWriteStreamTransportSettings = GrpcCallSettings.newBuilder() @@ -180,6 +185,15 @@ public Map extract(CreateWriteStreamRequest request) { GrpcCallSettings appendRowsTransportSettings = GrpcCallSettings.newBuilder() .setMethodDescriptor(appendRowsMethodDescriptor) + .setParamsExtractor( + new RequestParamsExtractor() { + @Override + public Map extract(AppendRowsRequest request) { + ImmutableMap.Builder params = ImmutableMap.builder(); + params.put("write_stream", String.valueOf(request.getWriteStream())); + return params.build(); + } + }) .build(); GrpcCallSettings getWriteStreamTransportSettings = GrpcCallSettings.newBuilder() @@ -262,7 +276,12 @@ public Map extract(FlushRowsRequest request) { callableFactory.createUnaryCallable( flushRowsTransportSettings, settings.flushRowsSettings(), clientContext); - 
backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + this.backgroundResources = + new BackgroundResourceAggregation(clientContext.getBackgroundResources()); + } + + public GrpcOperationsStub getOperationsStub() { + return operationsStub; } public UnaryCallable createWriteStreamCallable() { diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java index 1217dca250..647e921610 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1; import com.google.api.gax.core.NoCredentialsProvider; @@ -26,13 +27,15 @@ import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import io.grpc.Status; +import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -40,31 +43,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BaseBigQueryReadClientTest { private static MockBigQueryRead mockBigQueryRead; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private BaseBigQueryReadClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryRead = new MockBigQueryRead(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryRead)); - serviceHelper.start(); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BaseBigQueryReadSettings settings = BaseBigQueryReadSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -79,12 +82,14 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createReadSessionTest() { - ReadSessionName name = 
ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); - TableName table = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); + public void createReadSessionTest() throws Exception { ReadSession expectedResponse = - ReadSession.newBuilder().setName(name.toString()).setTable(table.toString()).build(); + ReadSession.newBuilder() + .setName("name3373707") + .setExpireTime(Timestamp.newBuilder().build()) + .setTable("table110115790") + .addAllStreams(new ArrayList()) + .build(); mockBigQueryRead.addResponse(expectedResponse); ProjectName parent = ProjectName.of("[PROJECT]"); @@ -96,9 +101,9 @@ public void createReadSessionTest() { List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); + CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); - Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(readSession, actualRequest.getReadSession()); Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); Assert.assertTrue( @@ -108,33 +113,83 @@ public void createReadSessionTest() { } @Test - @SuppressWarnings("all") public void createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { ProjectName parent = ProjectName.of("[PROJECT]"); ReadSession readSession = ReadSession.newBuilder().build(); int maxStreamCount = 940837515; + client.createReadSession(parent, readSession, maxStreamCount); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createReadSessionTest2() throws Exception { + ReadSession expectedResponse = + ReadSession.newBuilder() + .setName("name3373707") + .setExpireTime(Timestamp.newBuilder().build()) + .setTable("table110115790") + .addAllStreams(new ArrayList()) + .build(); + mockBigQueryRead.addResponse(expectedResponse); + + String parent = "parent-995424086"; + ReadSession readSession = ReadSession.newBuilder().build(); + int maxStreamCount = 940837515; + + ReadSession actualResponse = client.createReadSession(parent, readSession, maxStreamCount); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryRead.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(readSession, actualRequest.getReadSession()); + Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createReadSessionExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryRead.addException(exception); + + try { + String parent = "parent-995424086"; + ReadSession readSession = ReadSession.newBuilder().build(); + int maxStreamCount = 940837515; client.createReadSession(parent, readSession, maxStreamCount); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void readRowsTest() throws Exception { - long rowCount = 1340416618L; - ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); + ReadRowsResponse expectedResponse = + ReadRowsResponse.newBuilder() + .setRowCount(1340416618) + .setStats(StreamStats.newBuilder().build()) + .setThrottleState(ThrottleState.newBuilder().build()) + .build(); mockBigQueryRead.addResponse(expectedResponse); - ReadStreamName readStream = - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); + ReadRowsRequest.newBuilder() + .setReadStream( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setOffset(-1019779949) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -147,14 +202,15 @@ public void readRowsTest() throws Exception { } @Test - @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); - ReadStreamName readStream = - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); + ReadRowsRequest.newBuilder() + .setReadStream( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setOffset(-1019779949) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -166,29 +222,36 @@ public void readRowsExceptionTest() throws Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException 
apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void splitReadStreamTest() { - SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); + public void splitReadStreamTest() throws Exception { + SplitReadStreamResponse expectedResponse = + SplitReadStreamResponse.newBuilder() + .setPrimaryStream(ReadStream.newBuilder().build()) + .setRemainderStream(ReadStream.newBuilder().build()) + .build(); mockBigQueryRead.addResponse(expectedResponse); - ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); + SplitReadStreamRequest.newBuilder() + .setName( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setFraction(-1653751294) + .build(); SplitReadStreamResponse actualResponse = client.splitReadStream(request); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); + SplitReadStreamRequest actualRequest = ((SplitReadStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, ReadStreamName.parse(actualRequest.getName())); + Assert.assertEquals(request.getName(), actualRequest.getName()); + Assert.assertEquals(request.getFraction(), actualRequest.getFraction(), 0.0001); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -196,20 +259,21 @@ public void splitReadStreamTest() { } @Test - @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - 
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { - ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); - + SplitReadStreamRequest.newBuilder() + .setName( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setFraction(-1653751294) + .build(); client.splitReadStream(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java index 6c578b0d17..d4972d28a7 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryRead.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryRead implements MockGrpcService { private final MockBigQueryReadImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java index b6e022ac6f..21e64df693 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/MockBigQueryReadImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1; import com.google.api.core.BetaApi; @@ -23,9 +24,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryReadImpl extends BigQueryReadImplBase { private List requests; private Queue responses; @@ -62,10 +64,10 @@ public void createReadSession( Object response = responses.remove(); if (response instanceof ReadSession) { requests.add(request); - responseObserver.onNext((ReadSession) response); + responseObserver.onNext(((ReadSession) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -76,10 +78,10 @@ public void readRows(ReadRowsRequest request, StreamObserver r Object response = responses.remove(); if (response instanceof ReadRowsResponse) { requests.add(request); - responseObserver.onNext((ReadRowsResponse) response); + responseObserver.onNext(((ReadRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -91,10 +93,10 @@ public void splitReadStream( Object response = responses.remove(); if (response instanceof SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext((SplitReadStreamResponse) response); + responseObserver.onNext(((SplitReadStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } 
else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java index f035c493f5..87fe0ff955 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.gax.core.NoCredentialsProvider; @@ -26,25 +27,15 @@ import com.google.api.gax.rpc.BidiStreamingCallable; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.protobuf.AbstractMessage; -import io.grpc.Status; +import com.google.protobuf.Int64Value; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -52,31 +43,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BigQueryWriteClientTest { - private static MockBigQueryWrite mockBigQueryWrite; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private BigQueryWriteClient client; private 
LocalChannelProvider channelProvider; + private static MockBigQueryWrite mockBigQueryWrite; @BeforeClass public static void startStaticServer() { mockBigQueryWrite = new MockBigQueryWrite(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryWrite)); - serviceHelper.start(); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BigQueryWriteSettings settings = BigQueryWriteSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -91,25 +82,22 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createWriteStreamTest() { - WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - String externalId = "externalId-1153075697"; - WriteStream expectedResponse = - WriteStream.newBuilder().setName(name.toString()).setExternalId(externalId).build(); + public void createWriteStreamTest() throws Exception { + Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - WriteStream writeStream = WriteStream.newBuilder().build(); + Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); - WriteStream actualResponse = client.createWriteStream(parent, writeStream); + Stream.WriteStream actualResponse = client.createWriteStream(parent, writeStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateWriteStreamRequest actualRequest = (CreateWriteStreamRequest) 
actualRequests.get(0); + Storage.CreateWriteStreamRequest actualRequest = + ((Storage.CreateWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(writeStream, actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -118,96 +106,133 @@ public void createWriteStreamTest() { } @Test - @SuppressWarnings("all") public void createWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - WriteStream writeStream = WriteStream.newBuilder().build(); + Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); + client.createWriteStream(parent, writeStream); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + @Test + public void createWriteStreamTest2() throws Exception { + Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String parent = "parent-995424086"; + Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); + + Stream.WriteStream actualResponse = client.createWriteStream(parent, writeStream); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.CreateWriteStreamRequest actualRequest = + ((Storage.CreateWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(writeStream, actualRequest.getWriteStream()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String parent = "parent-995424086"; + Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build(); client.createWriteStream(parent, writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void appendRowsTest() throws Exception { - long offset = 1019779949L; - AppendRowsResponse expectedResponse = AppendRowsResponse.newBuilder().setOffset(offset).build(); + Storage.AppendRowsResponse expectedResponse = Storage.AppendRowsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); - WriteStreamName writeStream = - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - AppendRowsRequest request = - AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); + Storage.AppendRowsRequest request = + Storage.AppendRowsRequest.newBuilder() + .setWriteStream( + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setOffset(Int64Value.newBuilder().build()) + .setIgnoreUnknownFields(true) + .build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - BidiStreamingCallable callable = + BidiStreamingCallable callable = client.appendRowsCallable(); - ApiStreamObserver requestObserver = + ApiStreamObserver requestObserver = callable.bidiStreamingCall(responseObserver); requestObserver.onNext(request); requestObserver.onCompleted(); - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.assertEquals(1, actualResponses.size()); Assert.assertEquals(expectedResponse, actualResponses.get(0)); } @Test - @SuppressWarnings("all") public void appendRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); - WriteStreamName writeStream = - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - AppendRowsRequest request = - 
AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); + Storage.AppendRowsRequest request = + Storage.AppendRowsRequest.newBuilder() + .setWriteStream( + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setOffset(Int64Value.newBuilder().build()) + .setIgnoreUnknownFields(true) + .build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - BidiStreamingCallable callable = + BidiStreamingCallable callable = client.appendRowsCallable(); - ApiStreamObserver requestObserver = + ApiStreamObserver requestObserver = callable.bidiStreamingCall(responseObserver); requestObserver.onNext(request); try { - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void getWriteStreamTest() { - WriteStreamName name2 = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - String externalId = "externalId-1153075697"; - WriteStream expectedResponse = - WriteStream.newBuilder().setName(name2.toString()).setExternalId(externalId).build(); + public void getWriteStreamTest() throws Exception { + Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - WriteStream actualResponse = client.getWriteStream(name); + Stream.WriteStream actualResponse = 
client.getWriteStream(name); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetWriteStreamRequest actualRequest = (GetWriteStreamRequest) actualRequests.get(0); + Storage.GetWriteStreamRequest actualRequest = + ((Storage.GetWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -215,39 +240,72 @@ public void getWriteStreamTest() { } @Test - @SuppressWarnings("all") public void getWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.getWriteStream(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + @Test + public void getWriteStreamTest2() throws Exception { + Stream.WriteStream expectedResponse = Stream.WriteStream.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String name = "name3373707"; + + Stream.WriteStream actualResponse = client.getWriteStream(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.GetWriteStreamRequest actualRequest = + ((Storage.GetWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void getWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String name = "name3373707"; client.getWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void finalizeWriteStreamTest() { - long rowCount = 1340416618L; - FinalizeWriteStreamResponse expectedResponse = - FinalizeWriteStreamResponse.newBuilder().setRowCount(rowCount).build(); + public void finalizeWriteStreamTest() throws Exception { + Storage.FinalizeWriteStreamResponse expectedResponse = + Storage.FinalizeWriteStreamResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); + Storage.FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FinalizeWriteStreamRequest actualRequest = (FinalizeWriteStreamRequest) actualRequests.get(0); + Storage.FinalizeWriteStreamRequest actualRequest = + ((Storage.FinalizeWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -255,39 +313,73 @@ public void finalizeWriteStreamTest() { } @Test - @SuppressWarnings("all") public void finalizeWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.finalizeWriteStream(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void finalizeWriteStreamTest2() throws Exception { + Storage.FinalizeWriteStreamResponse expectedResponse = + Storage.FinalizeWriteStreamResponse.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String name = "name3373707"; + Storage.FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.FinalizeWriteStreamRequest actualRequest = + ((Storage.FinalizeWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void finalizeWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String name = "name3373707"; client.finalizeWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void batchCommitWriteStreamsTest() { - BatchCommitWriteStreamsResponse expectedResponse = - BatchCommitWriteStreamsResponse.newBuilder().build(); + public void batchCommitWriteStreamsTest() throws Exception { + Storage.BatchCommitWriteStreamsResponse expectedResponse = + Storage.BatchCommitWriteStreamsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); - BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); + Storage.BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - BatchCommitWriteStreamsRequest actualRequest = - (BatchCommitWriteStreamsRequest) actualRequests.get(0); + Storage.BatchCommitWriteStreamsRequest actualRequest = + ((Storage.BatchCommitWriteStreamsRequest) actualRequests.get(0)); - Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -295,39 +387,72 @@ public void batchCommitWriteStreamsTest() { } @Test - @SuppressWarnings("all") public void batchCommitWriteStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); + client.batchCommitWriteStreams(parent); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void batchCommitWriteStreamsTest2() throws Exception { + Storage.BatchCommitWriteStreamsResponse expectedResponse = + Storage.BatchCommitWriteStreamsResponse.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String parent = "parent-995424086"; + + Storage.BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.BatchCommitWriteStreamsRequest actualRequest = + ((Storage.BatchCommitWriteStreamsRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + @Test + public void batchCommitWriteStreamsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String parent = "parent-995424086"; client.batchCommitWriteStreams(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void flushRowsTest() { - long offset = 1019779949L; - FlushRowsResponse expectedResponse = FlushRowsResponse.newBuilder().setOffset(offset).build(); + public void flushRowsTest() throws Exception { + Storage.FlushRowsResponse expectedResponse = Storage.FlushRowsResponse.newBuilder().build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - FlushRowsResponse actualResponse = client.flushRows(writeStream); + Storage.FlushRowsResponse actualResponse = client.flushRows(writeStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FlushRowsRequest actualRequest = (FlushRowsRequest) actualRequests.get(0); + Storage.FlushRowsRequest actualRequest = ((Storage.FlushRowsRequest) actualRequests.get(0)); - Assert.assertEquals(writeStream, WriteStreamName.parse(actualRequest.getWriteStream())); + Assert.assertEquals(writeStream.toString(), actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -335,19 +460,52 @@ public void flushRowsTest() { } @Test - @SuppressWarnings("all") public void flushRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.flushRows(writeStream); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void flushRowsTest2() throws Exception { + Storage.FlushRowsResponse expectedResponse = Storage.FlushRowsResponse.newBuilder().build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String writeStream = "writeStream1412231231"; + Storage.FlushRowsResponse actualResponse = client.flushRows(writeStream); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.FlushRowsRequest actualRequest = ((Storage.FlushRowsRequest) actualRequests.get(0)); + + Assert.assertEquals(writeStream, actualRequest.getWriteStream()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void flushRowsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String writeStream = "writeStream1412231231"; client.flushRows(writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java index 543996d5e5..14652dc6d0 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWrite.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryWrite implements MockGrpcService { private final MockBigQueryWriteImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java index ecc8e99e05..e63712321a 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/MockBigQueryWriteImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,30 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1alpha2; import com.google.api.core.BetaApi; import com.google.cloud.bigquery.storage.v1alpha2.BigQueryWriteGrpc.BigQueryWriteImplBase; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.AppendRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.BatchCommitWriteStreamsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.CreateWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FinalizeWriteStreamResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.FlushRowsResponse; -import com.google.cloud.bigquery.storage.v1alpha2.Storage.GetWriteStreamRequest; -import com.google.cloud.bigquery.storage.v1alpha2.Stream.WriteStream; import com.google.protobuf.AbstractMessage; import io.grpc.stub.StreamObserver; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryWriteImpl extends BigQueryWriteImplBase { private List requests; private Queue responses; @@ -69,32 +60,33 @@ public void reset() { @Override public void createWriteStream( - CreateWriteStreamRequest request, StreamObserver responseObserver) { + Storage.CreateWriteStreamRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof WriteStream) { + if (response instanceof Stream.WriteStream) { requests.add(request); - responseObserver.onNext((WriteStream) response); + responseObserver.onNext(((Stream.WriteStream) 
response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } } @Override - public StreamObserver appendRows( - final StreamObserver responseObserver) { - StreamObserver requestObserver = - new StreamObserver() { + public StreamObserver appendRows( + final StreamObserver responseObserver) { + StreamObserver requestObserver = + new StreamObserver() { @Override - public void onNext(AppendRowsRequest value) { + public void onNext(Storage.AppendRowsRequest value) { requests.add(value); final Object response = responses.remove(); - if (response instanceof AppendRowsResponse) { - responseObserver.onNext((AppendRowsResponse) response); + if (response instanceof Storage.AppendRowsResponse) { + responseObserver.onNext(((Storage.AppendRowsResponse) response)); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -115,14 +107,14 @@ public void onCompleted() { @Override public void getWriteStream( - GetWriteStreamRequest request, StreamObserver responseObserver) { + Storage.GetWriteStreamRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof WriteStream) { + if (response instanceof Stream.WriteStream) { requests.add(request); - responseObserver.onNext((WriteStream) response); + responseObserver.onNext(((Stream.WriteStream) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ 
-130,15 +122,15 @@ public void getWriteStream( @Override public void finalizeWriteStream( - FinalizeWriteStreamRequest request, - StreamObserver responseObserver) { + Storage.FinalizeWriteStreamRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof FinalizeWriteStreamResponse) { + if (response instanceof Storage.FinalizeWriteStreamResponse) { requests.add(request); - responseObserver.onNext((FinalizeWriteStreamResponse) response); + responseObserver.onNext(((Storage.FinalizeWriteStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -146,15 +138,15 @@ public void finalizeWriteStream( @Override public void batchCommitWriteStreams( - BatchCommitWriteStreamsRequest request, - StreamObserver responseObserver) { + Storage.BatchCommitWriteStreamsRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof BatchCommitWriteStreamsResponse) { + if (response instanceof Storage.BatchCommitWriteStreamsResponse) { requests.add(request); - responseObserver.onNext((BatchCommitWriteStreamsResponse) response); + responseObserver.onNext(((Storage.BatchCommitWriteStreamsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -162,14 +154,15 @@ public void batchCommitWriteStreams( @Override public void flushRows( - FlushRowsRequest request, StreamObserver responseObserver) { + Storage.FlushRowsRequest request, + StreamObserver responseObserver) { Object response = 
responses.remove(); - if (response instanceof FlushRowsResponse) { + if (response instanceof Storage.FlushRowsResponse) { requests.add(request); - responseObserver.onNext((FlushRowsResponse) response); + responseObserver.onNext(((Storage.FlushRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java index 7d3c752e11..f0663837ef 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.gax.core.NoCredentialsProvider; @@ -25,27 +26,15 @@ import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.Stream; -import com.google.cloud.bigquery.storage.v1beta1.Storage.StreamPosition; -import com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; -import io.grpc.Status; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -53,31 +42,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BaseBigQueryStorageClientTest { private static MockBigQueryStorage mockBigQueryStorage; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper 
mockServiceHelper; private BaseBigQueryStorageClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryStorage = new MockBigQueryStorage(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( UUID.randomUUID().toString(), Arrays.asList(mockBigQueryStorage)); - serviceHelper.start(); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BaseBigQueryStorageSettings settings = BaseBigQueryStorageSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -92,25 +81,26 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createReadSessionTest() { - ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); - ReadSession expectedResponse = ReadSession.newBuilder().setName(name.toString()).build(); + public void createReadSessionTest() throws Exception { + Storage.ReadSession expectedResponse = Storage.ReadSession.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - TableReference tableReference = TableReference.newBuilder().build(); + TableReferenceProto.TableReference tableReference = + TableReferenceProto.TableReference.newBuilder().build(); ProjectName parent = ProjectName.of("[PROJECT]"); int requestedStreams = 1017221410; - ReadSession actualResponse = client.createReadSession(tableReference, parent, requestedStreams); + Storage.ReadSession actualResponse = + client.createReadSession(tableReference, parent, requestedStreams); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - 
CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); + Storage.CreateReadSessionRequest actualRequest = + ((Storage.CreateReadSessionRequest) actualRequests.get(0)); Assert.assertEquals(tableReference, actualRequest.getTableReference()); - Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -119,83 +109,123 @@ public void createReadSessionTest() { } @Test - @SuppressWarnings("all") public void createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - TableReference tableReference = TableReference.newBuilder().build(); + TableReferenceProto.TableReference tableReference = + TableReferenceProto.TableReference.newBuilder().build(); ProjectName parent = ProjectName.of("[PROJECT]"); int requestedStreams = 1017221410; + client.createReadSession(tableReference, parent, requestedStreams); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void createReadSessionTest2() throws Exception { + Storage.ReadSession expectedResponse = Storage.ReadSession.newBuilder().build(); + mockBigQueryStorage.addResponse(expectedResponse); + + TableReferenceProto.TableReference tableReference = + TableReferenceProto.TableReference.newBuilder().build(); + String parent = "parent-995424086"; + int requestedStreams = 1017221410; + + Storage.ReadSession actualResponse = + client.createReadSession(tableReference, parent, requestedStreams); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryStorage.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + Storage.CreateReadSessionRequest actualRequest = + ((Storage.CreateReadSessionRequest) actualRequests.get(0)); + Assert.assertEquals(tableReference, actualRequest.getTableReference()); + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createReadSessionExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryStorage.addException(exception); + + try { + TableReferenceProto.TableReference tableReference = + TableReferenceProto.TableReference.newBuilder().build(); + String parent = "parent-995424086"; + int requestedStreams = 1017221410; client.createReadSession(tableReference, parent, requestedStreams); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void readRowsTest() throws Exception { - long rowCount = 1340416618L; - ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); + Storage.ReadRowsResponse expectedResponse = Storage.ReadRowsResponse.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - StreamPosition readPosition = StreamPosition.newBuilder().build(); - ReadRowsRequest request = ReadRowsRequest.newBuilder().setReadPosition(readPosition).build(); + Storage.ReadRowsRequest request = Storage.ReadRowsRequest.newBuilder().build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - ServerStreamingCallable callable = client.readRowsCallable(); + ServerStreamingCallable callable = + client.readRowsCallable(); callable.serverStreamingCall(request, responseObserver); - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.assertEquals(1, actualResponses.size()); Assert.assertEquals(expectedResponse, actualResponses.get(0)); } @Test - @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); - StreamPosition readPosition = StreamPosition.newBuilder().build(); - ReadRowsRequest request = ReadRowsRequest.newBuilder().setReadPosition(readPosition).build(); + Storage.ReadRowsRequest request = Storage.ReadRowsRequest.newBuilder().build(); - MockStreamObserver responseObserver = new MockStreamObserver<>(); + MockStreamObserver responseObserver = new MockStreamObserver<>(); - ServerStreamingCallable callable = client.readRowsCallable(); + ServerStreamingCallable callable = + client.readRowsCallable(); 
callable.serverStreamingCall(request, responseObserver); try { - List actualResponses = responseObserver.future().get(); + List actualResponses = responseObserver.future().get(); Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void batchCreateReadSessionStreamsTest() { - BatchCreateReadSessionStreamsResponse expectedResponse = - BatchCreateReadSessionStreamsResponse.newBuilder().build(); + public void batchCreateReadSessionStreamsTest() throws Exception { + Storage.BatchCreateReadSessionStreamsResponse expectedResponse = + Storage.BatchCreateReadSessionStreamsResponse.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - ReadSession session = ReadSession.newBuilder().build(); + Storage.ReadSession session = Storage.ReadSession.newBuilder().build(); int requestedStreams = 1017221410; - BatchCreateReadSessionStreamsResponse actualResponse = + Storage.BatchCreateReadSessionStreamsResponse actualResponse = client.batchCreateReadSessionStreams(session, requestedStreams); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - BatchCreateReadSessionStreamsRequest actualRequest = - (BatchCreateReadSessionStreamsRequest) actualRequests.get(0); + Storage.BatchCreateReadSessionStreamsRequest actualRequest = + ((Storage.BatchCreateReadSessionStreamsRequest) actualRequests.get(0)); Assert.assertEquals(session, actualRequest.getSession()); Assert.assertEquals(requestedStreams, actualRequest.getRequestedStreams()); @@ -206,35 +236,33 @@ public void 
batchCreateReadSessionStreamsTest() { } @Test - @SuppressWarnings("all") public void batchCreateReadSessionStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - ReadSession session = ReadSession.newBuilder().build(); + Storage.ReadSession session = Storage.ReadSession.newBuilder().build(); int requestedStreams = 1017221410; - client.batchCreateReadSessionStreams(session, requestedStreams); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void finalizeStreamTest() { + public void finalizeStreamTest() throws Exception { Empty expectedResponse = Empty.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - Stream stream = Stream.newBuilder().build(); + Storage.Stream stream = Storage.Stream.newBuilder().build(); client.finalizeStream(stream); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FinalizeStreamRequest actualRequest = (FinalizeStreamRequest) actualRequests.get(0); + Storage.FinalizeStreamRequest actualRequest = + ((Storage.FinalizeStreamRequest) actualRequests.get(0)); Assert.assertEquals(stream, actualRequest.getStream()); Assert.assertTrue( @@ -244,35 +272,34 @@ public void finalizeStreamTest() { } @Test - @SuppressWarnings("all") public void finalizeStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - Stream stream = Stream.newBuilder().build(); - + Storage.Stream stream = Storage.Stream.newBuilder().build(); 
client.finalizeStream(stream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } @Test - @SuppressWarnings("all") - public void splitReadStreamTest() { - SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); + public void splitReadStreamTest() throws Exception { + Storage.SplitReadStreamResponse expectedResponse = + Storage.SplitReadStreamResponse.newBuilder().build(); mockBigQueryStorage.addResponse(expectedResponse); - Stream originalStream = Stream.newBuilder().build(); + Storage.Stream originalStream = Storage.Stream.newBuilder().build(); - SplitReadStreamResponse actualResponse = client.splitReadStream(originalStream); + Storage.SplitReadStreamResponse actualResponse = client.splitReadStream(originalStream); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryStorage.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); + Storage.SplitReadStreamRequest actualRequest = + ((Storage.SplitReadStreamRequest) actualRequests.get(0)); Assert.assertEquals(originalStream, actualRequest.getOriginalStream()); Assert.assertTrue( @@ -282,18 +309,16 @@ public void splitReadStreamTest() { } @Test - @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryStorage.addException(exception); try { - Stream originalStream = Stream.newBuilder().build(); - + Storage.Stream originalStream = Storage.Stream.newBuilder().build(); client.splitReadStream(originalStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java index 6110c0f370..36e2257abe 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorage.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryStorage implements MockGrpcService { private final MockBigQueryStorageImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java index 41197eb3e1..79dc8f2ca2 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta1/MockBigQueryStorageImpl.java @@ -5,7 +5,7 @@ * you may not 
use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,19 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta1; import com.google.api.core.BetaApi; import com.google.cloud.bigquery.storage.v1beta1.BigQueryStorageGrpc.BigQueryStorageImplBase; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.BatchCreateReadSessionStreamsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.CreateReadSessionRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.FinalizeStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadRowsResponse; -import com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamRequest; -import com.google.cloud.bigquery.storage.v1beta1.Storage.SplitReadStreamResponse; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; import io.grpc.stub.StreamObserver; @@ -33,9 +25,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryStorageImpl extends BigQueryStorageImplBase { private List requests; private Queue responses; @@ -68,28 +61,30 @@ public void reset() { @Override public void createReadSession( - CreateReadSessionRequest request, StreamObserver responseObserver) { + 
Storage.CreateReadSessionRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof ReadSession) { + if (response instanceof Storage.ReadSession) { requests.add(request); - responseObserver.onNext((ReadSession) response); + responseObserver.onNext(((Storage.ReadSession) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } } @Override - public void readRows(ReadRowsRequest request, StreamObserver responseObserver) { + public void readRows( + Storage.ReadRowsRequest request, StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof ReadRowsResponse) { + if (response instanceof Storage.ReadRowsResponse) { requests.add(request); - responseObserver.onNext((ReadRowsResponse) response); + responseObserver.onNext(((Storage.ReadRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -97,15 +92,15 @@ public void readRows(ReadRowsRequest request, StreamObserver r @Override public void batchCreateReadSessionStreams( - BatchCreateReadSessionStreamsRequest request, - StreamObserver responseObserver) { + Storage.BatchCreateReadSessionStreamsRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof BatchCreateReadSessionStreamsResponse) { + if (response instanceof Storage.BatchCreateReadSessionStreamsResponse) { requests.add(request); - responseObserver.onNext((BatchCreateReadSessionStreamsResponse) response); + 
responseObserver.onNext(((Storage.BatchCreateReadSessionStreamsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -113,14 +108,14 @@ public void batchCreateReadSessionStreams( @Override public void finalizeStream( - FinalizeStreamRequest request, StreamObserver responseObserver) { + Storage.FinalizeStreamRequest request, StreamObserver responseObserver) { Object response = responses.remove(); if (response instanceof Empty) { requests.add(request); - responseObserver.onNext((Empty) response); + responseObserver.onNext(((Empty) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -128,14 +123,15 @@ public void finalizeStream( @Override public void splitReadStream( - SplitReadStreamRequest request, StreamObserver responseObserver) { + Storage.SplitReadStreamRequest request, + StreamObserver responseObserver) { Object response = responses.remove(); - if (response instanceof SplitReadStreamResponse) { + if (response instanceof Storage.SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext((SplitReadStreamResponse) response); + responseObserver.onNext(((Storage.SplitReadStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git 
a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java index 24966efa77..e144d138a7 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.gax.core.NoCredentialsProvider; @@ -26,13 +27,15 @@ import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import io.grpc.Status; +import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -40,34 +43,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BaseBigQueryReadClientTest { private static MockBigQueryRead mockBigQueryRead; - private static MockBigQueryWrite mockBigQueryWrite; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private BaseBigQueryReadClient client; private LocalChannelProvider channelProvider; @BeforeClass public static void startStaticServer() { mockBigQueryRead = new MockBigQueryRead(); - mockBigQueryWrite = new MockBigQueryWrite(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), - Arrays.asList(mockBigQueryRead, mockBigQueryWrite)); - serviceHelper.start(); + UUID.randomUUID().toString(), Arrays.asList(mockBigQueryRead)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); BaseBigQueryReadSettings settings = BaseBigQueryReadSettings.newBuilder() 
.setTransportChannelProvider(channelProvider) @@ -82,12 +82,14 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createReadSessionTest() { - ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]"); - TableName table = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); + public void createReadSessionTest() throws Exception { ReadSession expectedResponse = - ReadSession.newBuilder().setName(name.toString()).setTable(table.toString()).build(); + ReadSession.newBuilder() + .setName("name3373707") + .setExpireTime(Timestamp.newBuilder().build()) + .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) + .addAllStreams(new ArrayList()) + .build(); mockBigQueryRead.addResponse(expectedResponse); ProjectName parent = ProjectName.of("[PROJECT]"); @@ -99,9 +101,9 @@ public void createReadSessionTest() { List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0); + CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); - Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(readSession, actualRequest.getReadSession()); Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); Assert.assertTrue( @@ -111,33 +113,83 @@ public void createReadSessionTest() { } @Test - @SuppressWarnings("all") public void createReadSessionExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { ProjectName parent = ProjectName.of("[PROJECT]"); ReadSession readSession = ReadSession.newBuilder().build(); int 
maxStreamCount = 940837515; + client.createReadSession(parent, readSession, maxStreamCount); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createReadSessionTest2() throws Exception { + ReadSession expectedResponse = + ReadSession.newBuilder() + .setName("name3373707") + .setExpireTime(Timestamp.newBuilder().build()) + .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) + .addAllStreams(new ArrayList()) + .build(); + mockBigQueryRead.addResponse(expectedResponse); + + String parent = "parent-995424086"; + ReadSession readSession = ReadSession.newBuilder().build(); + int maxStreamCount = 940837515; + + ReadSession actualResponse = client.createReadSession(parent, readSession, maxStreamCount); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryRead.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateReadSessionRequest actualRequest = ((CreateReadSessionRequest) actualRequests.get(0)); + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(readSession, actualRequest.getReadSession()); + Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void createReadSessionExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryRead.addException(exception); + + try { + String parent = "parent-995424086"; + ReadSession readSession = ReadSession.newBuilder().build(); + int maxStreamCount = 940837515; client.createReadSession(parent, readSession, maxStreamCount); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void readRowsTest() throws Exception { - long rowCount = 1340416618L; - ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build(); + ReadRowsResponse expectedResponse = + ReadRowsResponse.newBuilder() + .setRowCount(1340416618) + .setStats(StreamStats.newBuilder().build()) + .setThrottleState(ThrottleState.newBuilder().build()) + .build(); mockBigQueryRead.addResponse(expectedResponse); - ReadStreamName readStream = - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); + ReadRowsRequest.newBuilder() + .setReadStream( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setOffset(-1019779949) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -150,14 +202,15 @@ public void readRowsTest() throws Exception { } @Test - @SuppressWarnings("all") public void readRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); - ReadStreamName readStream = - ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); ReadRowsRequest request = - ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build(); + ReadRowsRequest.newBuilder() + .setReadStream( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setOffset(-1019779949) + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -169,29 +222,36 @@ public void readRowsExceptionTest() throws Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException 
apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void splitReadStreamTest() { - SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build(); + public void splitReadStreamTest() throws Exception { + SplitReadStreamResponse expectedResponse = + SplitReadStreamResponse.newBuilder() + .setPrimaryStream(ReadStream.newBuilder().build()) + .setRemainderStream(ReadStream.newBuilder().build()) + .build(); mockBigQueryRead.addResponse(expectedResponse); - ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); + SplitReadStreamRequest.newBuilder() + .setName( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setFraction(-1653751294) + .build(); SplitReadStreamResponse actualResponse = client.splitReadStream(request); Assert.assertEquals(expectedResponse, actualResponse); List actualRequests = mockBigQueryRead.getRequests(); Assert.assertEquals(1, actualRequests.size()); - SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0); + SplitReadStreamRequest actualRequest = ((SplitReadStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, ReadStreamName.parse(actualRequest.getName())); + Assert.assertEquals(request.getName(), actualRequest.getName()); + Assert.assertEquals(request.getFraction(), actualRequest.getFraction(), 0.0001); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -199,20 +259,21 @@ public void splitReadStreamTest() { } @Test - @SuppressWarnings("all") public void splitReadStreamExceptionTest() throws Exception { - 
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryRead.addException(exception); try { - ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]"); SplitReadStreamRequest request = - SplitReadStreamRequest.newBuilder().setName(name.toString()).build(); - + SplitReadStreamRequest.newBuilder() + .setName( + ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString()) + .setFraction(-1653751294) + .build(); client.splitReadStream(request); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java index 974b7bc43e..cce8ec092a 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClientTest.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.gax.core.NoCredentialsProvider; @@ -27,13 +28,16 @@ import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.protobuf.AbstractMessage; -import io.grpc.Status; +import com.google.protobuf.Int64Value; +import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; +import javax.annotation.Generated; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -41,34 +45,31 @@ import org.junit.BeforeClass; import org.junit.Test; -@javax.annotation.Generated("by GAPIC") +@Generated("by gapic-generator-java") public class BigQueryWriteClientTest { - private static MockBigQueryRead mockBigQueryRead; - private static MockBigQueryWrite mockBigQueryWrite; - private static MockServiceHelper serviceHelper; + private static MockServiceHelper mockServiceHelper; private BigQueryWriteClient client; private LocalChannelProvider channelProvider; + private static MockBigQueryWrite mockBigQueryWrite; @BeforeClass public static void startStaticServer() { - mockBigQueryRead = new MockBigQueryRead(); mockBigQueryWrite = new MockBigQueryWrite(); - serviceHelper = + mockServiceHelper = new MockServiceHelper( - UUID.randomUUID().toString(), - Arrays.asList(mockBigQueryRead, mockBigQueryWrite)); - serviceHelper.start(); + UUID.randomUUID().toString(), Arrays.asList(mockBigQueryWrite)); + mockServiceHelper.start(); } @AfterClass public static void stopServer() { - serviceHelper.stop(); + mockServiceHelper.stop(); } @Before public void setUp() throws IOException { - serviceHelper.reset(); - channelProvider = serviceHelper.createChannelProvider(); + mockServiceHelper.reset(); + channelProvider = mockServiceHelper.createChannelProvider(); 
BigQueryWriteSettings settings = BigQueryWriteSettings.newBuilder() .setTransportChannelProvider(channelProvider) @@ -83,10 +84,14 @@ public void tearDown() throws Exception { } @Test - @SuppressWarnings("all") - public void createWriteStreamTest() { - WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - WriteStream expectedResponse = WriteStream.newBuilder().setName(name.toString()).build(); + public void createWriteStreamTest() throws Exception { + WriteStream expectedResponse = + WriteStream.newBuilder() + .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setCommitTime(Timestamp.newBuilder().build()) + .setTableSchema(TableSchema.newBuilder().build()) + .build(); mockBigQueryWrite.addResponse(expectedResponse); TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); @@ -97,9 +102,9 @@ public void createWriteStreamTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - CreateWriteStreamRequest actualRequest = (CreateWriteStreamRequest) actualRequests.get(0); + CreateWriteStreamRequest actualRequest = ((CreateWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(parent, TableName.parse(actualRequest.getParent())); + Assert.assertEquals(parent.toString(), actualRequest.getParent()); Assert.assertEquals(writeStream, actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( @@ -108,31 +113,76 @@ public void createWriteStreamTest() { } @Test - @SuppressWarnings("all") public void createWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]"); 
WriteStream writeStream = WriteStream.newBuilder().build(); + client.createWriteStream(parent, writeStream); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void createWriteStreamTest2() throws Exception { + WriteStream expectedResponse = + WriteStream.newBuilder() + .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setCommitTime(Timestamp.newBuilder().build()) + .setTableSchema(TableSchema.newBuilder().build()) + .build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String parent = "parent-995424086"; + WriteStream writeStream = WriteStream.newBuilder().build(); + + WriteStream actualResponse = client.createWriteStream(parent, writeStream); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + CreateWriteStreamRequest actualRequest = ((CreateWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(parent, actualRequest.getParent()); + Assert.assertEquals(writeStream, actualRequest.getWriteStream()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + @Test + public void createWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String parent = "parent-995424086"; + WriteStream writeStream = WriteStream.newBuilder().build(); client.createWriteStream(parent, writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") public void appendRowsTest() throws Exception { - AppendRowsResponse expectedResponse = AppendRowsResponse.newBuilder().build(); + AppendRowsResponse expectedResponse = + AppendRowsResponse.newBuilder().setUpdatedSchema(TableSchema.newBuilder().build()).build(); mockBigQueryWrite.addResponse(expectedResponse); - WriteStreamName writeStream = - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); AppendRowsRequest request = - AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); + AppendRowsRequest.newBuilder() + .setWriteStream( + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setOffset(Int64Value.newBuilder().build()) + .setTraceId("traceId-1067401920") + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -150,14 +200,16 @@ public void appendRowsTest() throws Exception { } @Test - @SuppressWarnings("all") public void appendRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); - WriteStreamName writeStream = - WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); AppendRowsRequest request = - AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build(); + AppendRowsRequest.newBuilder() + .setWriteStream( + WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setOffset(Int64Value.newBuilder().build()) + .setTraceId("traceId-1067401920") + .build(); MockStreamObserver responseObserver = new MockStreamObserver<>(); @@ -173,16 +225,20 @@ public void appendRowsExceptionTest() throws Exception { Assert.fail("No exception thrown"); } catch (ExecutionException e) { Assert.assertTrue(e.getCause() instanceof InvalidArgumentException); - InvalidArgumentException 
apiException = (InvalidArgumentException) e.getCause(); + InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause()); Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode()); } } @Test - @SuppressWarnings("all") - public void getWriteStreamTest() { - WriteStreamName name2 = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); - WriteStream expectedResponse = WriteStream.newBuilder().setName(name2.toString()).build(); + public void getWriteStreamTest() throws Exception { + WriteStream expectedResponse = + WriteStream.newBuilder() + .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setCommitTime(Timestamp.newBuilder().build()) + .setTableSchema(TableSchema.newBuilder().build()) + .build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); @@ -192,9 +248,9 @@ public void getWriteStreamTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - GetWriteStreamRequest actualRequest = (GetWriteStreamRequest) actualRequests.get(0); + GetWriteStreamRequest actualRequest = ((GetWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -202,27 +258,64 @@ public void getWriteStreamTest() { } @Test - @SuppressWarnings("all") public void getWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = 
WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.getWriteStream(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. + } + } + + @Test + public void getWriteStreamTest2() throws Exception { + WriteStream expectedResponse = + WriteStream.newBuilder() + .setName(WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString()) + .setCreateTime(Timestamp.newBuilder().build()) + .setCommitTime(Timestamp.newBuilder().build()) + .setTableSchema(TableSchema.newBuilder().build()) + .build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String name = "name3373707"; + + WriteStream actualResponse = client.getWriteStream(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + GetWriteStreamRequest actualRequest = ((GetWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + @Test + public void getWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String name = "name3373707"; client.getWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void finalizeWriteStreamTest() { - long rowCount = 1340416618L; + public void finalizeWriteStreamTest() throws Exception { FinalizeWriteStreamResponse expectedResponse = - FinalizeWriteStreamResponse.newBuilder().setRowCount(rowCount).build(); + FinalizeWriteStreamResponse.newBuilder().setRowCount(1340416618).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); @@ -232,9 +325,9 @@ public void finalizeWriteStreamTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FinalizeWriteStreamRequest actualRequest = (FinalizeWriteStreamRequest) actualRequests.get(0); + FinalizeWriteStreamRequest actualRequest = ((FinalizeWriteStreamRequest) actualRequests.get(0)); - Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName())); + Assert.assertEquals(name.toString(), actualRequest.getName()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -242,26 +335,62 @@ public void finalizeWriteStreamTest() { } @Test - @SuppressWarnings("all") public void finalizeWriteStreamExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.finalizeWriteStream(name); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void finalizeWriteStreamTest2() throws Exception { + FinalizeWriteStreamResponse expectedResponse = + FinalizeWriteStreamResponse.newBuilder().setRowCount(1340416618).build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String name = "name3373707"; + + FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + FinalizeWriteStreamRequest actualRequest = ((FinalizeWriteStreamRequest) actualRequests.get(0)); + + Assert.assertEquals(name, actualRequest.getName()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void finalizeWriteStreamExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + try { + String name = "name3373707"; client.finalizeWriteStream(name); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void batchCommitWriteStreamsTest() { + public void batchCommitWriteStreamsTest() throws Exception { BatchCommitWriteStreamsResponse expectedResponse = - BatchCommitWriteStreamsResponse.newBuilder().build(); + BatchCommitWriteStreamsResponse.newBuilder() + .setCommitTime(Timestamp.newBuilder().build()) + .addAllStreamErrors(new ArrayList()) + .build(); mockBigQueryWrite.addResponse(expectedResponse); String parent = "parent-995424086"; @@ -272,7 +401,7 @@ public void batchCommitWriteStreamsTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); BatchCommitWriteStreamsRequest actualRequest = - (BatchCommitWriteStreamsRequest) actualRequests.get(0); + ((BatchCommitWriteStreamsRequest) actualRequests.get(0)); Assert.assertEquals(parent, actualRequest.getParent()); Assert.assertTrue( @@ -282,26 +411,23 @@ public void batchCommitWriteStreamsTest() { } @Test - @SuppressWarnings("all") public void batchCommitWriteStreamsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { String parent = "parent-995424086"; - client.batchCommitWriteStreams(parent); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. 
} } @Test - @SuppressWarnings("all") - public void flushRowsTest() { - long offset = 1019779949L; - FlushRowsResponse expectedResponse = FlushRowsResponse.newBuilder().setOffset(offset).build(); + public void flushRowsTest() throws Exception { + FlushRowsResponse expectedResponse = + FlushRowsResponse.newBuilder().setOffset(-1019779949).build(); mockBigQueryWrite.addResponse(expectedResponse); WriteStreamName writeStream = @@ -312,9 +438,9 @@ public void flushRowsTest() { List actualRequests = mockBigQueryWrite.getRequests(); Assert.assertEquals(1, actualRequests.size()); - FlushRowsRequest actualRequest = (FlushRowsRequest) actualRequests.get(0); + FlushRowsRequest actualRequest = ((FlushRowsRequest) actualRequests.get(0)); - Assert.assertEquals(writeStream, WriteStreamName.parse(actualRequest.getWriteStream())); + Assert.assertEquals(writeStream.toString(), actualRequest.getWriteStream()); Assert.assertTrue( channelProvider.isHeaderSent( ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), @@ -322,19 +448,53 @@ public void flushRowsTest() { } @Test - @SuppressWarnings("all") public void flushRowsExceptionTest() throws Exception { - StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT); + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); mockBigQueryWrite.addException(exception); try { WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]"); + client.flushRows(writeStream); + Assert.fail("No exception raised"); + } catch (InvalidArgumentException e) { + // Expected exception. 
+ } + } + + @Test + public void flushRowsTest2() throws Exception { + FlushRowsResponse expectedResponse = + FlushRowsResponse.newBuilder().setOffset(-1019779949).build(); + mockBigQueryWrite.addResponse(expectedResponse); + + String writeStream = "writeStream1412231231"; + FlushRowsResponse actualResponse = client.flushRows(writeStream); + Assert.assertEquals(expectedResponse, actualResponse); + + List actualRequests = mockBigQueryWrite.getRequests(); + Assert.assertEquals(1, actualRequests.size()); + FlushRowsRequest actualRequest = ((FlushRowsRequest) actualRequests.get(0)); + + Assert.assertEquals(writeStream, actualRequest.getWriteStream()); + Assert.assertTrue( + channelProvider.isHeaderSent( + ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), + GaxGrpcProperties.getDefaultApiClientHeaderPattern())); + } + + @Test + public void flushRowsExceptionTest2() throws Exception { + StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT); + mockBigQueryWrite.addException(exception); + + try { + String writeStream = "writeStream1412231231"; client.flushRows(writeStream); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { - // Expected exception + // Expected exception. } } } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java index 26d09c0ef8..cd82e240fb 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryRead.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryRead implements MockGrpcService { private final MockBigQueryReadImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java index 56d6b3d432..1482baca9c 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryReadImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -23,9 +24,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryReadImpl extends BigQueryReadImplBase { private List requests; private Queue responses; @@ -62,10 +64,10 @@ public void createReadSession( Object response = responses.remove(); if (response instanceof ReadSession) { requests.add(request); - responseObserver.onNext((ReadSession) response); + responseObserver.onNext(((ReadSession) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -76,10 +78,10 @@ public void readRows(ReadRowsRequest request, StreamObserver r Object response = responses.remove(); if (response instanceof ReadRowsResponse) { requests.add(request); - responseObserver.onNext((ReadRowsResponse) response); + responseObserver.onNext(((ReadRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -91,10 +93,10 @@ public void splitReadStream( Object response = responses.remove(); if (response instanceof SplitReadStreamResponse) { requests.add(request); - responseObserver.onNext((SplitReadStreamResponse) response); + responseObserver.onNext(((SplitReadStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); 
} else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java index ea99368e82..8adf63c1f2 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWrite.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -20,9 +21,10 @@ import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryWrite implements MockGrpcService { private final MockBigQueryWriteImpl serviceImpl; diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java index 654a52574d..078421f361 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/MockBigQueryWriteImpl.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package com.google.cloud.bigquery.storage.v1beta2; import com.google.api.core.BetaApi; @@ -23,9 +24,10 @@ import java.util.LinkedList; import java.util.List; import java.util.Queue; +import javax.annotation.Generated; -@javax.annotation.Generated("by GAPIC") @BetaApi +@Generated("by gapic-generator-java") public class MockBigQueryWriteImpl extends BigQueryWriteImplBase { private List requests; private Queue responses; @@ -62,10 +64,10 @@ public void createWriteStream( Object response = responses.remove(); if (response instanceof WriteStream) { requests.add(request); - responseObserver.onNext((WriteStream) response); + responseObserver.onNext(((WriteStream) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -81,9 +83,9 @@ public void onNext(AppendRowsRequest value) { requests.add(value); final Object response = responses.remove(); if (response instanceof AppendRowsResponse) { - responseObserver.onNext((AppendRowsResponse) response); + responseObserver.onNext(((AppendRowsResponse) response)); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -108,10 +110,10 @@ public void getWriteStream( Object response = responses.remove(); if (response instanceof WriteStream) { requests.add(request); - responseObserver.onNext((WriteStream) response); + responseObserver.onNext(((WriteStream) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new 
IllegalArgumentException("Unrecognized response type")); } @@ -124,10 +126,10 @@ public void finalizeWriteStream( Object response = responses.remove(); if (response instanceof FinalizeWriteStreamResponse) { requests.add(request); - responseObserver.onNext((FinalizeWriteStreamResponse) response); + responseObserver.onNext(((FinalizeWriteStreamResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -140,10 +142,10 @@ public void batchCommitWriteStreams( Object response = responses.remove(); if (response instanceof BatchCommitWriteStreamsResponse) { requests.add(request); - responseObserver.onNext((BatchCommitWriteStreamsResponse) response); + responseObserver.onNext(((BatchCommitWriteStreamsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } @@ -155,10 +157,10 @@ public void flushRows( Object response = responses.remove(); if (response instanceof FlushRowsResponse) { requests.add(request); - responseObserver.onNext((FlushRowsResponse) response); + responseObserver.onNext(((FlushRowsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { - responseObserver.onError((Exception) response); + responseObserver.onError(((Exception) response)); } else { responseObserver.onError(new IllegalArgumentException("Unrecognized response type")); } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java 
b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java index 0d8b2c2e12..e138c838d1 100644 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,18 +23,26 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java") public class ProjectName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; + @Deprecated + protected ProjectName() { + project = null; + } + + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public String getProject() { return project; } @@ -47,10 +55,6 @@ public Builder toBuilder() { return new Builder(this); } - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } - public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -64,7 +68,7 @@ public static ProjectName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -78,7 +82,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ProjectName value : values) { if (value == null) { list.add(""); @@ -90,15 +94,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); + if (project != null) { + fieldMapBuilder.put("project", project); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -112,14 +119,35 @@ public String 
getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project); + return PROJECT.instantiate("project", project); } - /** Builder for ProjectName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ProjectName that = ((ProjectName) o); + return Objects.equals(this.project, that.project); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + return h; + } + + /** Builder for projects/{project}. */ + public static class Builder { private String project; + protected Builder() {} + public String getProject() { return project; } @@ -129,8 +157,6 @@ public Builder setProject(String project) { return this; } - private Builder() {} - private Builder(ProjectName projectName) { project = projectName.project; } @@ -139,24 +165,4 @@ public ProjectName build() { return new ProjectName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ProjectName) { - ProjectName that = (ProjectName) o; - return (this.project.equals(that.project)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java deleted file mode 100644 index 4aa5209ddd..0000000000 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the 
License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.cloud.bigquery.storage.v1; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class ReadSessionName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding( - "projects/{project}/locations/{location}/sessions/{session}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String location; - private final String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private ReadSessionName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - } - - public static ReadSessionName of(String project, String location, String session) { - return newBuilder().setProject(project).setLocation(location).setSession(session).build(); - } - - public static String format(String project, 
String location, String session) { - return newBuilder() - .setProject(project) - .setLocation(location) - .setSession(session) - .build() - .toString(); - } - - public static ReadSessionName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "ReadSessionName.parse: formattedString not in valid format"); - return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); - } - - public static List parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (ReadSessionName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); - } - - /** Builder for ReadSessionName. 
*/ - public static class Builder { - - private String project; - private String location; - private String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setLocation(String location) { - this.location = location; - return this; - } - - public Builder setSession(String session) { - this.session = session; - return this; - } - - private Builder() {} - - private Builder(ReadSessionName readSessionName) { - project = readSessionName.project; - location = readSessionName.location; - session = readSessionName.session; - } - - public ReadSessionName build() { - return new ReadSessionName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadSessionName) { - ReadSessionName that = (ReadSessionName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java index 9c8236c663..8c68ce74b7 100644 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,36 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class ReadStreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_LOCATION_SESSION_STREAM = PathTemplate.createWithoutUrlEncoding( "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}"); - private volatile Map fieldValuesMap; - private final String project; private final String location; private final String session; private final String stream; + @Deprecated + protected ReadStreamName() { + project = null; + location = null; + session = null; + stream = null; + } + + private ReadStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public String getProject() { return project; } @@ -63,13 +77,6 @@ public Builder toBuilder() { return new Builder(this); } - private ReadStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public static ReadStreamName of(String project, String location, String 
session, String stream) { return newBuilder() .setProject(project) @@ -94,7 +101,7 @@ public static ReadStreamName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_LOCATION_SESSION_STREAM.validatedMatch( formattedString, "ReadStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -112,7 +119,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ReadStreamName value : values) { if (value == null) { list.add(""); @@ -124,18 +131,27 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_LOCATION_SESSION_STREAM.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldMapBuilder.put("stream", stream); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (session != null) { + fieldMapBuilder.put("session", session); + } + if (stream != null) { + fieldMapBuilder.put("stream", stream); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -149,18 +165,48 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate( + return PROJECT_LOCATION_SESSION_STREAM.instantiate( "project", project, "location", location, "session", session, "stream", stream); } - /** Builder for ReadStreamName. 
*/ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ReadStreamName that = ((ReadStreamName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.session, that.session) + && Objects.equals(this.stream, that.stream); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(session); + h *= 1000003; + h ^= Objects.hashCode(stream); + return h; + } + + /** Builder for projects/{project}/locations/{location}/sessions/{session}/streams/{stream}. */ + public static class Builder { private String project; private String location; private String session; private String stream; + protected Builder() {} + public String getProject() { return project; } @@ -197,8 +243,6 @@ public Builder setStream(String stream) { return this; } - private Builder() {} - private Builder(ReadStreamName readStreamName) { project = readStreamName.project; location = readStreamName.location; @@ -210,33 +254,4 @@ public ReadStreamName build() { return new ReadStreamName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadStreamName) { - ReadStreamName that = (ReadStreamName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - h *= 1000003; - h ^= stream.hashCode(); - return h; - } } diff --git 
a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java deleted file mode 100644 index 4478859799..0000000000 --- a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.cloud.bigquery.storage.v1; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class TableName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String dataset; - private final String table; - - public String getProject() { - return project; - } - - public String getDataset() { - return dataset; - } - - public String getTable() { - return table; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private TableName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - } - - public static TableName of(String project, String dataset, String table) { - return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); - } - - public static String format(String project, String dataset, String table) { - return newBuilder().setProject(project).setDataset(dataset).setTable(table).build().toString(); - } - - public static TableName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "TableName.parse: formattedString not in valid format"); - return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); - } - - public static List 
parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (TableName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); - } - - /** Builder for TableName. 
*/ - public static class Builder { - - private String project; - private String dataset; - private String table; - - public String getProject() { - return project; - } - - public String getDataset() { - return dataset; - } - - public String getTable() { - return table; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setDataset(String dataset) { - this.dataset = dataset; - return this; - } - - public Builder setTable(String table) { - this.table = table; - return this; - } - - private Builder() {} - - private Builder(TableName tableName) { - project = tableName.project; - dataset = tableName.dataset; - table = tableName.table; - } - - public TableName build() { - return new TableName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof TableName) { - TableName that = (TableName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java index 47ab519a3a..2a1d43d92b 100644 --- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java +++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,20 +23,32 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class TableName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_DATASET_TABLE = PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); - private volatile Map fieldValuesMap; - private final String project; private final String dataset; private final String table; + @Deprecated + protected TableName() { + project = null; + dataset = null; + table = null; + } + + private TableName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + } + public String getProject() { return project; } @@ -57,12 +69,6 @@ public Builder toBuilder() { return new Builder(this); } - private TableName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - } - public static TableName of(String project, String dataset, String table) { return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); } @@ -76,7 +82,7 @@ public static TableName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + 
PROJECT_DATASET_TABLE.validatedMatch( formattedString, "TableName.parse: formattedString not in valid format"); return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); } @@ -90,7 +96,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (TableName value : values) { if (value == null) { list.add(""); @@ -102,17 +108,24 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_DATASET_TABLE.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (dataset != null) { + fieldMapBuilder.put("dataset", dataset); + } + if (table != null) { + fieldMapBuilder.put("table", table); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -126,16 +139,44 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); + return PROJECT_DATASET_TABLE.instantiate( + "project", project, "dataset", dataset, "table", table); } - /** Builder for TableName. 
*/ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + TableName that = ((TableName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= Objects.hashCode(table); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}. */ + public static class Builder { private String project; private String dataset; private String table; + protected Builder() {} + public String getProject() { return project; } @@ -163,8 +204,6 @@ public Builder setTable(String table) { return this; } - private Builder() {} - private Builder(TableName tableName) { project = tableName.project; dataset = tableName.dataset; @@ -175,30 +214,4 @@ public TableName build() { return new TableName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof TableName) { - TableName that = (TableName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java index dbc4bd48a7..7b2430f06a 100644 --- 
a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,36 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class WriteStreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_DATASET_TABLE_STREAM = PathTemplate.createWithoutUrlEncoding( "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"); - private volatile Map fieldValuesMap; - private final String project; private final String dataset; private final String table; private final String stream; + @Deprecated + protected WriteStreamName() { + project = null; + dataset = null; + table = null; + stream = null; + } + + private WriteStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public String getProject() { return project; } @@ -63,13 +77,6 @@ public Builder toBuilder() { return new Builder(this); } - private WriteStreamName(Builder builder) { - project = 
Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public static WriteStreamName of(String project, String dataset, String table, String stream) { return newBuilder() .setProject(project) @@ -94,7 +101,7 @@ public static WriteStreamName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_DATASET_TABLE_STREAM.validatedMatch( formattedString, "WriteStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -112,7 +119,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (WriteStreamName value : values) { if (value == null) { list.add(""); @@ -124,18 +131,27 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_DATASET_TABLE_STREAM.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); - fieldMapBuilder.put("stream", stream); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (dataset != null) { + fieldMapBuilder.put("dataset", dataset); + } + if (table != null) { + fieldMapBuilder.put("table", table); + } + if (stream != null) { + fieldMapBuilder.put("stream", stream); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -149,18 +165,48 @@ public String getFieldValue(String fieldName) { @Override public String 
toString() { - return PATH_TEMPLATE.instantiate( + return PROJECT_DATASET_TABLE_STREAM.instantiate( "project", project, "dataset", dataset, "table", table, "stream", stream); } - /** Builder for WriteStreamName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + WriteStreamName that = ((WriteStreamName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table) + && Objects.equals(this.stream, that.stream); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= Objects.hashCode(table); + h *= 1000003; + h ^= Objects.hashCode(stream); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}. */ + public static class Builder { private String project; private String dataset; private String table; private String stream; + protected Builder() {} + public String getProject() { return project; } @@ -197,8 +243,6 @@ public Builder setStream(String stream) { return this; } - private Builder() {} - private Builder(WriteStreamName writeStreamName) { project = writeStreamName.project; dataset = writeStreamName.dataset; @@ -210,33 +254,4 @@ public WriteStreamName build() { return new WriteStreamName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof WriteStreamName) { - WriteStreamName that = (WriteStreamName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= 
dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - h *= 1000003; - h ^= stream.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java index 3ffe5f3360..61336b2620 100644 --- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,18 +23,26 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java") public class ProjectName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; + @Deprecated + protected ProjectName() { + project = null; + } + + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public String getProject() { return project; } @@ -47,10 +55,6 @@ public Builder toBuilder() { return new Builder(this); } - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } - public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -64,7 +68,7 @@ public static ProjectName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -78,7 +82,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ProjectName value : values) { if (value == null) { list.add(""); @@ -90,15 +94,18 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); + if (project != null) { + fieldMapBuilder.put("project", project); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -112,14 +119,35 @@ public String 
getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project); + return PROJECT.instantiate("project", project); } - /** Builder for ProjectName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ProjectName that = ((ProjectName) o); + return Objects.equals(this.project, that.project); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + return h; + } + + /** Builder for projects/{project}. */ + public static class Builder { private String project; + protected Builder() {} + public String getProject() { return project; } @@ -129,8 +157,6 @@ public Builder setProject(String project) { return this; } - private Builder() {} - private Builder(ProjectName projectName) { project = projectName.project; } @@ -139,24 +165,4 @@ public ProjectName build() { return new ProjectName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ProjectName) { - ProjectName that = (ProjectName) o; - return (this.project.equals(that.project)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java deleted file mode 100644 index c0762f78e5..0000000000 --- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file 
except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.cloud.bigquery.storage.v1beta1; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class ReadSessionName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding( - "projects/{project}/locations/{location}/sessions/{session}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String location; - private final String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private ReadSessionName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - } - - public static ReadSessionName of(String project, String location, String session) { - return newBuilder().setProject(project).setLocation(location).setSession(session).build(); - } - - public static 
String format(String project, String location, String session) { - return newBuilder() - .setProject(project) - .setLocation(location) - .setSession(session) - .build() - .toString(); - } - - public static ReadSessionName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "ReadSessionName.parse: formattedString not in valid format"); - return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); - } - - public static List parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (ReadSessionName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); - } - - /** Builder for ReadSessionName. 
*/ - public static class Builder { - - private String project; - private String location; - private String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setLocation(String location) { - this.location = location; - return this; - } - - public Builder setSession(String session) { - this.session = session; - return this; - } - - private Builder() {} - - private Builder(ReadSessionName readSessionName) { - project = readSessionName.project; - location = readSessionName.location; - session = readSessionName.session; - } - - public ReadSessionName build() { - return new ReadSessionName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadSessionName) { - ReadSessionName that = (ReadSessionName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java deleted file mode 100644 index a486d4fc85..0000000000 --- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.cloud.bigquery.storage.v1beta1; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class StreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding( - "projects/{project}/locations/{location}/streams/{stream}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String location; - private final String stream; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getStream() { - return stream; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private StreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - - public static StreamName of(String project, String location, String stream) { - return newBuilder().setProject(project).setLocation(location).setStream(stream).build(); - } - - public static String format(String project, String location, String stream) { 
- return newBuilder() - .setProject(project) - .setLocation(location) - .setStream(stream) - .build() - .toString(); - } - - public static StreamName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "StreamName.parse: formattedString not in valid format"); - return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("stream")); - } - - public static List parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (StreamName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("stream", stream); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "location", location, "stream", stream); - } - - /** Builder for StreamName. 
*/ - public static class Builder { - - private String project; - private String location; - private String stream; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getStream() { - return stream; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setLocation(String location) { - this.location = location; - return this; - } - - public Builder setStream(String stream) { - this.stream = stream; - return this; - } - - private Builder() {} - - private Builder(StreamName streamName) { - project = streamName.project; - location = streamName.location; - stream = streamName.stream; - } - - public StreamName build() { - return new StreamName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof StreamName) { - StreamName that = (StreamName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= stream.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java index 3ca1b0df14..e6400237ee 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,18 +23,26 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class ProjectName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT = PathTemplate.createWithoutUrlEncoding("projects/{project}"); - private volatile Map fieldValuesMap; - private final String project; + @Deprecated + protected ProjectName() { + project = null; + } + + private ProjectName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + } + public String getProject() { return project; } @@ -47,10 +55,6 @@ public Builder toBuilder() { return new Builder(this); } - private ProjectName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - } - public static ProjectName of(String project) { return newBuilder().setProject(project).build(); } @@ -64,7 +68,7 @@ public static ProjectName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT.validatedMatch( formattedString, "ProjectName.parse: formattedString not in valid format"); return of(matchMap.get("project")); } @@ -78,7 +82,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ProjectName value : values) { if (value == null) { list.add(""); @@ -90,15 +94,18 @@ public 
static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); + if (project != null) { + fieldMapBuilder.put("project", project); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -112,14 +119,35 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project); + return PROJECT.instantiate("project", project); } - /** Builder for ProjectName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ProjectName that = ((ProjectName) o); + return Objects.equals(this.project, that.project); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + return h; + } + + /** Builder for projects/{project}. 
*/ + public static class Builder { private String project; + protected Builder() {} + public String getProject() { return project; } @@ -129,8 +157,6 @@ public Builder setProject(String project) { return this; } - private Builder() {} - private Builder(ProjectName projectName) { project = projectName.project; } @@ -139,24 +165,4 @@ public ProjectName build() { return new ProjectName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ProjectName) { - ProjectName that = (ProjectName) o; - return (this.project.equals(that.project)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java deleted file mode 100644 index 46c8731d94..0000000000 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2020 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.cloud.bigquery.storage.v1beta2; - -import com.google.api.pathtemplate.PathTemplate; -import com.google.api.resourcenames.ResourceName; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") -public class ReadSessionName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = - PathTemplate.createWithoutUrlEncoding( - "projects/{project}/locations/{location}/sessions/{session}"); - - private volatile Map fieldValuesMap; - - private final String project; - private final String location; - private final String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public Builder toBuilder() { - return new Builder(this); - } - - private ReadSessionName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - } - - public static ReadSessionName of(String project, String location, String session) { - return newBuilder().setProject(project).setLocation(location).setSession(session).build(); - } - - public static String format(String project, String location, String session) { - return newBuilder() - .setProject(project) - .setLocation(location) - .setSession(session) - .build() - .toString(); - } - - public static ReadSessionName parse(String formattedString) { - if (formattedString.isEmpty()) { - return null; - } - Map matchMap = - PATH_TEMPLATE.validatedMatch( - formattedString, "ReadSessionName.parse: formattedString not in valid format"); - return 
of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); - } - - public static List parseList(List formattedStrings) { - List list = new ArrayList<>(formattedStrings.size()); - for (String formattedString : formattedStrings) { - list.add(parse(formattedString)); - } - return list; - } - - public static List toStringList(List values) { - List list = new ArrayList(values.size()); - for (ReadSessionName value : values) { - if (value == null) { - list.add(""); - } else { - list.add(value.toString()); - } - } - return list; - } - - public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); - } - - public Map getFieldValuesMap() { - if (fieldValuesMap == null) { - synchronized (this) { - if (fieldValuesMap == null) { - ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldValuesMap = fieldMapBuilder.build(); - } - } - } - return fieldValuesMap; - } - - public String getFieldValue(String fieldName) { - return getFieldValuesMap().get(fieldName); - } - - @Override - public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session); - } - - /** Builder for ReadSessionName. 
*/ - public static class Builder { - - private String project; - private String location; - private String session; - - public String getProject() { - return project; - } - - public String getLocation() { - return location; - } - - public String getSession() { - return session; - } - - public Builder setProject(String project) { - this.project = project; - return this; - } - - public Builder setLocation(String location) { - this.location = location; - return this; - } - - public Builder setSession(String session) { - this.session = session; - return this; - } - - private Builder() {} - - private Builder(ReadSessionName readSessionName) { - project = readSessionName.project; - location = readSessionName.location; - session = readSessionName.session; - } - - public ReadSessionName build() { - return new ReadSessionName(this); - } - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadSessionName) { - ReadSessionName that = (ReadSessionName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - return h; - } -} diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java index cd616ebba7..eab784f8ac 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,36 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class ReadStreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_LOCATION_SESSION_STREAM = PathTemplate.createWithoutUrlEncoding( "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}"); - private volatile Map fieldValuesMap; - private final String project; private final String location; private final String session; private final String stream; + @Deprecated + protected ReadStreamName() { + project = null; + location = null; + session = null; + stream = null; + } + + private ReadStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public String getProject() { return project; } @@ -63,13 +77,6 @@ public Builder toBuilder() { return new Builder(this); } - private ReadStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - location = Preconditions.checkNotNull(builder.getLocation()); - session = Preconditions.checkNotNull(builder.getSession()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public static ReadStreamName of(String project, String location, String 
session, String stream) { return newBuilder() .setProject(project) @@ -94,7 +101,7 @@ public static ReadStreamName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_LOCATION_SESSION_STREAM.validatedMatch( formattedString, "ReadStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -112,7 +119,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (ReadStreamName value : values) { if (value == null) { list.add(""); @@ -124,18 +131,27 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_LOCATION_SESSION_STREAM.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("location", location); - fieldMapBuilder.put("session", session); - fieldMapBuilder.put("stream", stream); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (session != null) { + fieldMapBuilder.put("session", session); + } + if (stream != null) { + fieldMapBuilder.put("stream", stream); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -149,18 +165,48 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate( + return PROJECT_LOCATION_SESSION_STREAM.instantiate( "project", project, "location", location, "session", session, "stream", stream); } - /** Builder for ReadStreamName. 
*/ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ReadStreamName that = ((ReadStreamName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.session, that.session) + && Objects.equals(this.stream, that.stream); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(session); + h *= 1000003; + h ^= Objects.hashCode(stream); + return h; + } + + /** Builder for projects/{project}/locations/{location}/sessions/{session}/streams/{stream}. */ + public static class Builder { private String project; private String location; private String session; private String stream; + protected Builder() {} + public String getProject() { return project; } @@ -197,8 +243,6 @@ public Builder setStream(String stream) { return this; } - private Builder() {} - private Builder(ReadStreamName readStreamName) { project = readStreamName.project; location = readStreamName.location; @@ -210,33 +254,4 @@ public ReadStreamName build() { return new ReadStreamName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof ReadStreamName) { - ReadStreamName that = (ReadStreamName) o; - return (this.project.equals(that.project)) - && (this.location.equals(that.location)) - && (this.session.equals(that.session)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= location.hashCode(); - h *= 1000003; - h ^= session.hashCode(); - h *= 1000003; - h ^= stream.hashCode(); - return h; - } } diff --git 
a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java index 476d710941..a90e8b3d16 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,20 +23,32 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java") public class TableName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_DATASET_TABLE = PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); - private volatile Map fieldValuesMap; - private final String project; private final String dataset; private final String table; + @Deprecated + protected TableName() { + project = null; + dataset = null; + table = null; + } + + private TableName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + } + public String getProject() { return project; } @@ -57,12 +69,6 @@ public Builder toBuilder() { return new Builder(this); } - private TableName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - } - public static TableName of(String project, String dataset, String table) { return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); } @@ -76,7 +82,7 @@ public static TableName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_DATASET_TABLE.validatedMatch( formattedString, "TableName.parse: formattedString not in valid format"); return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); } @@ -90,7 +96,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (TableName value : values) { if (value == null) { list.add(""); @@ -102,17 +108,24 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String 
formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_DATASET_TABLE.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (dataset != null) { + fieldMapBuilder.put("dataset", dataset); + } + if (table != null) { + fieldMapBuilder.put("table", table); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -126,16 +139,44 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table); + return PROJECT_DATASET_TABLE.instantiate( + "project", project, "dataset", dataset, "table", table); } - /** Builder for TableName. */ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + TableName that = ((TableName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= Objects.hashCode(table); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}. 
*/ + public static class Builder { private String project; private String dataset; private String table; + protected Builder() {} + public String getProject() { return project; } @@ -163,8 +204,6 @@ public Builder setTable(String table) { return this; } - private Builder() {} - private Builder(TableName tableName) { project = tableName.project; dataset = tableName.dataset; @@ -175,30 +214,4 @@ public TableName build() { return new TableName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof TableName) { - TableName that = (TableName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - return h; - } } diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java index ba877cc86b..b3392aeadc 100644 --- a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java +++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/WriteStreamName.java @@ -5,7 +5,7 @@ * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * https://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,36 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; -/** AUTO-GENERATED DOCUMENTATION AND CLASS */ -@javax.annotation.Generated("by GAPIC protoc plugin") +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") public class WriteStreamName implements ResourceName { - - private static final PathTemplate PATH_TEMPLATE = + private static final PathTemplate PROJECT_DATASET_TABLE_STREAM = PathTemplate.createWithoutUrlEncoding( "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"); - private volatile Map fieldValuesMap; - private final String project; private final String dataset; private final String table; private final String stream; + @Deprecated + protected WriteStreamName() { + project = null; + dataset = null; + table = null; + stream = null; + } + + private WriteStreamName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + stream = Preconditions.checkNotNull(builder.getStream()); + } + public String getProject() { return project; } @@ -63,13 +77,6 @@ public Builder toBuilder() { return new Builder(this); } - private WriteStreamName(Builder builder) { - project = Preconditions.checkNotNull(builder.getProject()); - dataset = Preconditions.checkNotNull(builder.getDataset()); - table = Preconditions.checkNotNull(builder.getTable()); - stream = Preconditions.checkNotNull(builder.getStream()); - } - public static WriteStreamName of(String project, String dataset, String table, String stream) { 
return newBuilder() .setProject(project) @@ -94,7 +101,7 @@ public static WriteStreamName parse(String formattedString) { return null; } Map matchMap = - PATH_TEMPLATE.validatedMatch( + PROJECT_DATASET_TABLE_STREAM.validatedMatch( formattedString, "WriteStreamName.parse: formattedString not in valid format"); return of( matchMap.get("project"), @@ -112,7 +119,7 @@ public static List parseList(List formattedStrings) { } public static List toStringList(List values) { - List list = new ArrayList(values.size()); + List list = new ArrayList<>(values.size()); for (WriteStreamName value : values) { if (value == null) { list.add(""); @@ -124,18 +131,27 @@ public static List toStringList(List values) { } public static boolean isParsableFrom(String formattedString) { - return PATH_TEMPLATE.matches(formattedString); + return PROJECT_DATASET_TABLE_STREAM.matches(formattedString); } + @Override public Map getFieldValuesMap() { if (fieldValuesMap == null) { synchronized (this) { if (fieldValuesMap == null) { ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); - fieldMapBuilder.put("project", project); - fieldMapBuilder.put("dataset", dataset); - fieldMapBuilder.put("table", table); - fieldMapBuilder.put("stream", stream); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (dataset != null) { + fieldMapBuilder.put("dataset", dataset); + } + if (table != null) { + fieldMapBuilder.put("table", table); + } + if (stream != null) { + fieldMapBuilder.put("stream", stream); + } fieldValuesMap = fieldMapBuilder.build(); } } @@ -149,18 +165,48 @@ public String getFieldValue(String fieldName) { @Override public String toString() { - return PATH_TEMPLATE.instantiate( + return PROJECT_DATASET_TABLE_STREAM.instantiate( "project", project, "dataset", dataset, "table", table, "stream", stream); } - /** Builder for WriteStreamName. 
*/ - public static class Builder { + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + WriteStreamName that = ((WriteStreamName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table) + && Objects.equals(this.stream, that.stream); + } + return false; + } + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= Objects.hashCode(table); + h *= 1000003; + h ^= Objects.hashCode(stream); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}. */ + public static class Builder { private String project; private String dataset; private String table; private String stream; + protected Builder() {} + public String getProject() { return project; } @@ -197,8 +243,6 @@ public Builder setStream(String stream) { return this; } - private Builder() {} - private Builder(WriteStreamName writeStreamName) { project = writeStreamName.project; dataset = writeStreamName.dataset; @@ -210,33 +254,4 @@ public WriteStreamName build() { return new WriteStreamName(this); } } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (o instanceof WriteStreamName) { - WriteStreamName that = (WriteStreamName) o; - return (this.project.equals(that.project)) - && (this.dataset.equals(that.dataset)) - && (this.table.equals(that.table)) - && (this.stream.equals(that.stream)); - } - return false; - } - - @Override - public int hashCode() { - int h = 1; - h *= 1000003; - h ^= project.hashCode(); - h *= 1000003; - h ^= dataset.hashCode(); - h *= 1000003; - h ^= table.hashCode(); - h *= 1000003; - h ^= stream.hashCode(); - return h; - } } diff --git a/synth.metadata b/synth.metadata index f233260b2e..1a03b15e23 100644 --- 
a/synth.metadata +++ b/synth.metadata @@ -18,32 +18,32 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f8f975c7d43904e90d6c5f1684fdb6804400e641", - "internalRef": "345522380" + "sha": "abc43060f136ce77124754a48f367102e646844a", + "internalRef": "346405446" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f8f975c7d43904e90d6c5f1684fdb6804400e641", - "internalRef": "345522380" + "sha": "abc43060f136ce77124754a48f367102e646844a", + "internalRef": "346405446" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f8f975c7d43904e90d6c5f1684fdb6804400e641", - "internalRef": "345522380" + "sha": "abc43060f136ce77124754a48f367102e646844a", + "internalRef": "346405446" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f8f975c7d43904e90d6c5f1684fdb6804400e641", - "internalRef": "345522380" + "sha": "abc43060f136ce77124754a48f367102e646844a", + "internalRef": "346405446" } }, { @@ -234,7 +234,6 @@ "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadRowsResponse.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadRowsResponseOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSession.java", - "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStream.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java", @@ -247,7 +246,6 @@ 
"proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamProto.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamStats.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamStatsOrBuilder.java", - "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ThrottleState.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ThrottleStateOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/proto/google/cloud/bigquery/storage/v1/arrow.proto", @@ -268,9 +266,7 @@ "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/AvroProto.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadOptions.java", - "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/Storage.java", - "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/TableReferenceProto.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/arrow.proto", "proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/avro.proto", @@ -323,7 +319,6 @@ 
"proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadRowsResponse.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadRowsResponseOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSession.java", - "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStream.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java", From f965df92382b5d468af4912879310e005af650df Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 11 Dec 2020 16:48:00 -0800 Subject: [PATCH 6/6] chore: update gapic-generator-java to 0.0.11 Committer: @miraleung PiperOrigin-RevId: 347036369 Source-Author: Google APIs Source-Date: Fri Dec 11 11:13:47 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: 6d65640b1fcbdf26ea76cb720de0ac138cae9bed Source-Link: https://github.com/googleapis/googleapis/commit/6d65640b1fcbdf26ea76cb720de0ac138cae9bed --- .../v1/BaseBigQueryReadClientTest.java | 8 +- .../v1beta2/BaseBigQueryReadClientTest.java | 4 +- .../bigquery/storage/v1/ReadSessionName.java | 223 ++++++++++++++++++ .../cloud/bigquery/storage/v1/TableName.java | 217 +++++++++++++++++ .../storage/v1beta1/ReadSessionName.java | 223 ++++++++++++++++++ .../bigquery/storage/v1beta1/StreamName.java | 223 ++++++++++++++++++ .../storage/v1beta2/ReadSessionName.java | 223 ++++++++++++++++++ synth.metadata | 21 +- 8 files changed, 1128 insertions(+), 14 deletions(-) create mode 100644 
proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java create mode 100644 proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java create mode 100644 proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java create mode 100644 proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java create mode 100644 proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java index 647e921610..5bb419c025 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java @@ -85,9 +85,9 @@ public void tearDown() throws Exception { public void createReadSessionTest() throws Exception { ReadSession expectedResponse = ReadSession.newBuilder() - .setName("name3373707") + .setName(ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]").toString()) .setExpireTime(Timestamp.newBuilder().build()) - .setTable("table110115790") + .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) .addAllStreams(new ArrayList()) .build(); mockBigQueryRead.addResponse(expectedResponse); @@ -132,9 +132,9 @@ public void createReadSessionExceptionTest() throws Exception { public void createReadSessionTest2() throws Exception { ReadSession expectedResponse = ReadSession.newBuilder() - .setName("name3373707") + .setName(ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]").toString()) 
.setExpireTime(Timestamp.newBuilder().build()) - .setTable("table110115790") + .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) .addAllStreams(new ArrayList()) .build(); mockBigQueryRead.addResponse(expectedResponse); diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java index e144d138a7..5330bd51ee 100644 --- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java +++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java @@ -85,7 +85,7 @@ public void tearDown() throws Exception { public void createReadSessionTest() throws Exception { ReadSession expectedResponse = ReadSession.newBuilder() - .setName("name3373707") + .setName(ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]").toString()) .setExpireTime(Timestamp.newBuilder().build()) .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) .addAllStreams(new ArrayList()) @@ -132,7 +132,7 @@ public void createReadSessionExceptionTest() throws Exception { public void createReadSessionTest2() throws Exception { ReadSession expectedResponse = ReadSession.newBuilder() - .setName("name3373707") + .setName(ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]").toString()) .setExpireTime(Timestamp.newBuilder().build()) .setTable(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString()) .addAllStreams(new ArrayList()) diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java new file mode 100644 index 0000000000..76d1ac5645 --- /dev/null +++ 
b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java @@ -0,0 +1,223 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.storage.v1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. 
+@Generated("by gapic-generator-java") +public class ReadSessionName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_SESSION = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/sessions/{session}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String session; + + @Deprecated + protected ReadSessionName() { + project = null; + location = null; + session = null; + } + + private ReadSessionName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static ReadSessionName of(String project, String location, String session) { + return newBuilder().setProject(project).setLocation(location).setSession(session).build(); + } + + public static String format(String project, String location, String session) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setSession(session) + .build() + .toString(); + } + + public static ReadSessionName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_LOCATION_SESSION.validatedMatch( + formattedString, "ReadSessionName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } 
+ return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (ReadSessionName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_SESSION.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (session != null) { + fieldMapBuilder.put("session", session); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_SESSION.instantiate( + "project", project, "location", location, "session", session); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ReadSessionName that = ((ReadSessionName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.session, that.session); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(session); + return h; + } + + /** Builder for projects/{project}/locations/{location}/sessions/{session}. 
*/ + public static class Builder { + private String project; + private String location; + private String session; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setSession(String session) { + this.session = session; + return this; + } + + private Builder(ReadSessionName readSessionName) { + project = readSessionName.project; + location = readSessionName.location; + session = readSessionName.session; + } + + public ReadSessionName build() { + return new ReadSessionName(this); + } + } +} diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java new file mode 100644 index 0000000000..a2dc1febae --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java @@ -0,0 +1,217 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.storage.v1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class TableName implements ResourceName { + private static final PathTemplate PROJECT_DATASET_TABLE = + PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}"); + private volatile Map fieldValuesMap; + private final String project; + private final String dataset; + private final String table; + + @Deprecated + protected TableName() { + project = null; + dataset = null; + table = null; + } + + private TableName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + dataset = Preconditions.checkNotNull(builder.getDataset()); + table = Preconditions.checkNotNull(builder.getTable()); + } + + public String getProject() { + return project; + } + + public String getDataset() { + return dataset; + } + + public String getTable() { + return table; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static TableName of(String project, String dataset, String table) { + return newBuilder().setProject(project).setDataset(dataset).setTable(table).build(); + } + + public static String format(String project, String dataset, String table) { + return newBuilder().setProject(project).setDataset(dataset).setTable(table).build().toString(); + } + + public static TableName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } + Map matchMap = + PROJECT_DATASET_TABLE.validatedMatch( + formattedString, "TableName.parse: 
formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (TableName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_DATASET_TABLE.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (dataset != null) { + fieldMapBuilder.put("dataset", dataset); + } + if (table != null) { + fieldMapBuilder.put("table", table); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_DATASET_TABLE.instantiate( + "project", project, "dataset", dataset, "table", table); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + TableName that = ((TableName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.dataset, that.dataset) + && Objects.equals(this.table, that.table); + } + return false; + } + + @Override + public int hashCode() { + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(dataset); + h *= 1000003; + h ^= 
Objects.hashCode(table); + return h; + } + + /** Builder for projects/{project}/datasets/{dataset}/tables/{table}. */ + public static class Builder { + private String project; + private String dataset; + private String table; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getDataset() { + return dataset; + } + + public String getTable() { + return table; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setDataset(String dataset) { + this.dataset = dataset; + return this; + } + + public Builder setTable(String table) { + this.table = table; + return this; + } + + private Builder(TableName tableName) { + project = tableName.project; + dataset = tableName.dataset; + table = tableName.table; + } + + public TableName build() { + return new TableName(this); + } + } +} diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java new file mode 100644 index 0000000000..467eddb925 --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java @@ -0,0 +1,223 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.storage.v1beta1; + +import com.google.api.pathtemplate.PathTemplate; +import com.google.api.resourcenames.ResourceName; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Generated; + +// AUTO-GENERATED DOCUMENTATION AND CLASS. +@Generated("by gapic-generator-java") +public class ReadSessionName implements ResourceName { + private static final PathTemplate PROJECT_LOCATION_SESSION = + PathTemplate.createWithoutUrlEncoding( + "projects/{project}/locations/{location}/sessions/{session}"); + private volatile Map fieldValuesMap; + private final String project; + private final String location; + private final String session; + + @Deprecated + protected ReadSessionName() { + project = null; + location = null; + session = null; + } + + private ReadSessionName(Builder builder) { + project = Preconditions.checkNotNull(builder.getProject()); + location = Preconditions.checkNotNull(builder.getLocation()); + session = Preconditions.checkNotNull(builder.getSession()); + } + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + public static ReadSessionName of(String project, String location, String session) { + return newBuilder().setProject(project).setLocation(location).setSession(session).build(); + } + + public static String format(String project, String location, String session) { + return newBuilder() + .setProject(project) + .setLocation(location) + .setSession(session) + .build() + .toString(); + } + + public static ReadSessionName parse(String formattedString) { + if (formattedString.isEmpty()) { + return null; + } 
+ Map matchMap = + PROJECT_LOCATION_SESSION.validatedMatch( + formattedString, "ReadSessionName.parse: formattedString not in valid format"); + return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session")); + } + + public static List parseList(List formattedStrings) { + List list = new ArrayList<>(formattedStrings.size()); + for (String formattedString : formattedStrings) { + list.add(parse(formattedString)); + } + return list; + } + + public static List toStringList(List values) { + List list = new ArrayList<>(values.size()); + for (ReadSessionName value : values) { + if (value == null) { + list.add(""); + } else { + list.add(value.toString()); + } + } + return list; + } + + public static boolean isParsableFrom(String formattedString) { + return PROJECT_LOCATION_SESSION.matches(formattedString); + } + + @Override + public Map getFieldValuesMap() { + if (fieldValuesMap == null) { + synchronized (this) { + if (fieldValuesMap == null) { + ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder(); + if (project != null) { + fieldMapBuilder.put("project", project); + } + if (location != null) { + fieldMapBuilder.put("location", location); + } + if (session != null) { + fieldMapBuilder.put("session", session); + } + fieldValuesMap = fieldMapBuilder.build(); + } + } + } + return fieldValuesMap; + } + + public String getFieldValue(String fieldName) { + return getFieldValuesMap().get(fieldName); + } + + @Override + public String toString() { + return PROJECT_LOCATION_SESSION.instantiate( + "project", project, "location", location, "session", session); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o != null || getClass() == o.getClass()) { + ReadSessionName that = ((ReadSessionName) o); + return Objects.equals(this.project, that.project) + && Objects.equals(this.location, that.location) + && Objects.equals(this.session, that.session); + } + return false; + } + + @Override + public int hashCode() 
{ + int h = 1; + h *= 1000003; + h ^= Objects.hashCode(project); + h *= 1000003; + h ^= Objects.hashCode(location); + h *= 1000003; + h ^= Objects.hashCode(session); + return h; + } + + /** Builder for projects/{project}/locations/{location}/sessions/{session}. */ + public static class Builder { + private String project; + private String location; + private String session; + + protected Builder() {} + + public String getProject() { + return project; + } + + public String getLocation() { + return location; + } + + public String getSession() { + return session; + } + + public Builder setProject(String project) { + this.project = project; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Builder setSession(String session) { + this.session = session; + return this; + } + + private Builder(ReadSessionName readSessionName) { + project = readSessionName.project; + location = readSessionName.location; + session = readSessionName.session; + } + + public ReadSessionName build() { + return new ReadSessionName(this); + } + } +} diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java new file mode 100644 index 0000000000..81f6ac5ec7 --- /dev/null +++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java @@ -0,0 +1,223 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.storage.v1beta1;

import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Immutable resource name for the pattern
 * {@code projects/{project}/locations/{location}/streams/{stream}}.
 *
 * <p>Build with {@link #of} / {@link #newBuilder()}, round-trip with {@link #toString()} and
 * {@link #parse(String)}.
 */
@Generated("by gapic-generator-java")
public class StreamName implements ResourceName {
  private static final PathTemplate PROJECT_LOCATION_STREAM =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/locations/{location}/streams/{stream}");
  // Lazily computed; volatile for safe double-checked-locking publication in getFieldValuesMap().
  private volatile Map<String, String> fieldValuesMap;
  private final String project;
  private final String location;
  private final String stream;

  /** @deprecated only for creating an uninitialized instance; all fields are null. */
  @Deprecated
  protected StreamName() {
    project = null;
    location = null;
    stream = null;
  }

  private StreamName(Builder builder) {
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    stream = Preconditions.checkNotNull(builder.getStream());
  }

  public String getProject() {
    return project;
  }

  public String getLocation() {
    return location;
  }

  public String getStream() {
    return stream;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Creates a StreamName from its three path components; none may be null. */
  public static StreamName of(String project, String location, String stream) {
    return newBuilder().setProject(project).setLocation(location).setStream(stream).build();
  }

  /** Formats the components directly into the canonical resource-name string. */
  public static String format(String project, String location, String stream) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setStream(stream)
        .build()
        .toString();
  }

  /**
   * Parses a formatted resource-name string back into a StreamName.
   *
   * @return null when the input is empty (not an error)
   * @throws com.google.api.pathtemplate.ValidationException if the string does not match the
   *     template
   */
  public static StreamName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        PROJECT_LOCATION_STREAM.validatedMatch(
            formattedString, "StreamName.parse: formattedString not in valid format");
    return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("stream"));
  }

  public static List<StreamName> parseList(List<String> formattedStrings) {
    List<StreamName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  /** Null elements map to "" so the output list is the same length as the input. */
  public static List<String> toStringList(List<StreamName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (StreamName value : values) {
      if (value == null) {
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  public static boolean isParsableFrom(String formattedString) {
    return PROJECT_LOCATION_STREAM.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (project != null) {
            fieldMapBuilder.put("project", project);
          }
          if (location != null) {
            fieldMapBuilder.put("location", location);
          }
          if (stream != null) {
            fieldMapBuilder.put("stream", stream);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return PROJECT_LOCATION_STREAM.instantiate(
        "project", project, "location", location, "stream", stream);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // Fixed: was `o != null || getClass() == o.getClass()`. With `||`, equals(null) NPEs on
    // o.getClass() and equals(someOtherType) throws ClassCastException inside the branch,
    // violating the Object.equals contract (both cases must simply return false).
    if (o != null && getClass() == o.getClass()) {
      StreamName that = ((StreamName) o);
      return Objects.equals(this.project, that.project)
          && Objects.equals(this.location, that.location)
          && Objects.equals(this.stream, that.stream);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(stream);
    return h;
  }

  /** Builder for projects/{project}/locations/{location}/streams/{stream}. */
  public static class Builder {
    private String project;
    private String location;
    private String stream;

    protected Builder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getStream() {
      return stream;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }

    public Builder setStream(String stream) {
      this.stream = stream;
      return this;
    }

    private Builder(StreamName streamName) {
      project = streamName.project;
      location = streamName.location;
      stream = streamName.stream;
    }

    /** @throws NullPointerException if any of project/location/stream is unset. */
    public StreamName build() {
      return new StreamName(this);
    }
  }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.bigquery.storage.v1beta2;

import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Immutable resource name for the pattern
 * {@code projects/{project}/locations/{location}/sessions/{session}}.
 *
 * <p>Build with {@link #of} / {@link #newBuilder()}, round-trip with {@link #toString()} and
 * {@link #parse(String)}.
 */
@Generated("by gapic-generator-java")
public class ReadSessionName implements ResourceName {
  private static final PathTemplate PROJECT_LOCATION_SESSION =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/locations/{location}/sessions/{session}");
  // Lazily computed; volatile for safe double-checked-locking publication in getFieldValuesMap().
  private volatile Map<String, String> fieldValuesMap;
  private final String project;
  private final String location;
  private final String session;

  /** @deprecated only for creating an uninitialized instance; all fields are null. */
  @Deprecated
  protected ReadSessionName() {
    project = null;
    location = null;
    session = null;
  }

  private ReadSessionName(Builder builder) {
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    session = Preconditions.checkNotNull(builder.getSession());
  }

  public String getProject() {
    return project;
  }

  public String getLocation() {
    return location;
  }

  public String getSession() {
    return session;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Creates a ReadSessionName from its three path components; none may be null. */
  public static ReadSessionName of(String project, String location, String session) {
    return newBuilder().setProject(project).setLocation(location).setSession(session).build();
  }

  /** Formats the components directly into the canonical resource-name string. */
  public static String format(String project, String location, String session) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setSession(session)
        .build()
        .toString();
  }

  /**
   * Parses a formatted resource-name string back into a ReadSessionName.
   *
   * @return null when the input is empty (not an error)
   * @throws com.google.api.pathtemplate.ValidationException if the string does not match the
   *     template
   */
  public static ReadSessionName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        PROJECT_LOCATION_SESSION.validatedMatch(
            formattedString, "ReadSessionName.parse: formattedString not in valid format");
    return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session"));
  }

  public static List<ReadSessionName> parseList(List<String> formattedStrings) {
    List<ReadSessionName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  /** Null elements map to "" so the output list is the same length as the input. */
  public static List<String> toStringList(List<ReadSessionName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (ReadSessionName value : values) {
      if (value == null) {
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  public static boolean isParsableFrom(String formattedString) {
    return PROJECT_LOCATION_SESSION.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (project != null) {
            fieldMapBuilder.put("project", project);
          }
          if (location != null) {
            fieldMapBuilder.put("location", location);
          }
          if (session != null) {
            fieldMapBuilder.put("session", session);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return PROJECT_LOCATION_SESSION.instantiate(
        "project", project, "location", location, "session", session);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // Fixed: was `o != null || getClass() == o.getClass()`. With `||`, equals(null) NPEs on
    // o.getClass() and equals(someOtherType) throws ClassCastException inside the branch,
    // violating the Object.equals contract (both cases must simply return false).
    if (o != null && getClass() == o.getClass()) {
      ReadSessionName that = ((ReadSessionName) o);
      return Objects.equals(this.project, that.project)
          && Objects.equals(this.location, that.location)
          && Objects.equals(this.session, that.session);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(session);
    return h;
  }

  /** Builder for projects/{project}/locations/{location}/sessions/{session}. */
  public static class Builder {
    private String project;
    private String location;
    private String session;

    protected Builder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getSession() {
      return session;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }

    public Builder setSession(String session) {
      this.session = session;
      return this;
    }

    private Builder(ReadSessionName readSessionName) {
      project = readSessionName.project;
      location = readSessionName.location;
      session = readSessionName.session;
    }

    /** @throws NullPointerException if any of project/location/session is unset. */
    public ReadSessionName build() {
      return new ReadSessionName(this);
    }
  }
}
"internalRef": "347036369" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "abc43060f136ce77124754a48f367102e646844a", - "internalRef": "346405446" + "sha": "6d65640b1fcbdf26ea76cb720de0ac138cae9bed", + "internalRef": "347036369" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "abc43060f136ce77124754a48f367102e646844a", - "internalRef": "346405446" + "sha": "6d65640b1fcbdf26ea76cb720de0ac138cae9bed", + "internalRef": "347036369" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "abc43060f136ce77124754a48f367102e646844a", - "internalRef": "346405446" + "sha": "6d65640b1fcbdf26ea76cb720de0ac138cae9bed", + "internalRef": "347036369" } }, { @@ -234,6 +234,7 @@ "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadRowsResponse.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadRowsResponseOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSession.java", + "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStream.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java", @@ -246,6 +247,7 @@ "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamProto.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamStats.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/StreamStatsOrBuilder.java", + 
"proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ThrottleState.java", "proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ThrottleStateOrBuilder.java", "proto-google-cloud-bigquerystorage-v1/src/main/proto/google/cloud/bigquery/storage/v1/arrow.proto", @@ -266,7 +268,9 @@ "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/AvroProto.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ProjectName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadOptions.java", + "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/Storage.java", + "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/StreamName.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/TableReferenceProto.java", "proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/arrow.proto", "proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/avro.proto", @@ -319,6 +323,7 @@ "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadRowsResponse.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadRowsResponseOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSession.java", + 
"proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionOrBuilder.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStream.java", "proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java",