diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java
index f4ec3cfd1c..d34aaf2a27 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClient.java
@@ -35,6 +35,16 @@
 *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   ReadSession readSession = ReadSession.newBuilder().build();
+ *   int maxStreamCount = 940837515;
+ *   ReadSession response =
+ *       baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+ * }
+ * }
+ * *

Note: close() needs to be called on the BaseBigQueryReadClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). @@ -156,6 +166,18 @@ public BigQueryReadStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   int maxStreamCount = 940837515;
+   *   ReadSession response =
+   *       baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+   * }
+   * }
+ * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. * @param readSession Required. Session to be created. @@ -197,6 +219,18 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   String parent = ProjectName.of("[PROJECT]").toString();
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   int maxStreamCount = 940837515;
+   *   ReadSession response =
+   *       baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+   * }
+   * }
+ * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. * @param readSession Required. Session to be created. @@ -238,6 +272,20 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   CreateReadSessionRequest request =
+   *       CreateReadSessionRequest.newBuilder()
+   *           .setParent(ProjectName.of("[PROJECT]").toString())
+   *           .setReadSession(ReadSession.newBuilder().build())
+   *           .setMaxStreamCount(940837515)
+   *           .build();
+   *   ReadSession response = baseBigQueryReadClient.createReadSession(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -265,6 +313,21 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   CreateReadSessionRequest request =
+   *       CreateReadSessionRequest.newBuilder()
+   *           .setParent(ProjectName.of("[PROJECT]").toString())
+   *           .setReadSession(ReadSession.newBuilder().build())
+   *           .setMaxStreamCount(940837515)
+   *           .build();
+   *   ApiFuture<ReadSession> future =
+   *       baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
+   *   // Do something.
+   *   ReadSession response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); @@ -280,6 +343,22 @@ public final UnaryCallable createReadSess * stream. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ReadRowsRequest request =
+   *       ReadRowsRequest.newBuilder()
+   *           .setReadStream(
+   *               ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString())
+   *           .setOffset(-1019779949)
+   *           .build();
+   *   ServerStream<ReadRowsResponse> stream =
+   *       baseBigQueryReadClient.readRowsCallable().call(request);
+   *   for (ReadRowsResponse response : stream) {
+   *     // Do something when a response is received.
+   *   }
+   * }
+   * }
*/ public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); @@ -298,6 +377,20 @@ public final ServerStreamingCallable readRows * original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read * to completion. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   SplitReadStreamRequest request =
+   *       SplitReadStreamRequest.newBuilder()
+   *           .setName(
+   *               ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString())
+   *           .setFraction(-1653751294)
+   *           .build();
+   *   SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -319,6 +412,21 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ * to completion. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   SplitReadStreamRequest request =
+   *       SplitReadStreamRequest.newBuilder()
+   *           .setName(
+   *               ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString())
+   *           .setFraction(-1653751294)
+   *           .build();
+   *   ApiFuture<SplitReadStreamResponse> future =
+   *       baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
+   *   // Do something.
+   *   SplitReadStreamResponse response = future.get();
+   * }
+   * }
   */
  public final UnaryCallable<SplitReadStreamRequest, SplitReadStreamResponse>
      splitReadStreamCallable() {
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java
index 6530a3c320..b641a52330 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java
@@ -24,6 +24,16 @@
 *

The Read API can be used to read data from BigQuery. * *

Sample for BaseBigQueryReadClient: + * + *

{@code
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   ReadSession readSession = ReadSession.newBuilder().build();
+ *   int maxStreamCount = 940837515;
+ *   ReadSession response =
+ *       baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+ * }
+ * }
  */
 @Generated("by gapic-generator-java")
 package com.google.cloud.bigquery.storage.v1;
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java
index f33cd79ea0..8304ae5cee 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java
@@ -66,18 +66,18 @@
 *

For example, to set the total timeout of createReadSession to 30 seconds: * *

{@code
- * BigQueryReadStubSettings.Builder bigQueryReadSettingsBuilder =
+ * BigQueryReadStubSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BigQueryReadStubSettings.newBuilder();
- * bigQueryReadSettingsBuilder
+ * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         bigQueryReadSettingsBuilder
+ *         baseBigQueryReadSettingsBuilder
  *             .createReadSessionSettings()
  *             .getRetrySettings()
  *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryReadStubSettings bigQueryReadSettings = bigQueryReadSettingsBuilder.build();
+ * BigQueryReadStubSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
  * }
  */
 @Generated("by gapic-generator-java")
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java
index f5e2114ade..85b829740b 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java
@@ -35,6 +35,14 @@
 *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ *   Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build();
+ *   Stream.WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ * }
+ * *

Note: close() needs to be called on the BigQueryWriteClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * @@ -139,6 +147,16 @@ public BigQueryWriteStub getStub() { /** * Creates a write stream to the given table. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build();
+   *   Stream.WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+   * }
+   * }
+ * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @param writeStream Required. Stream to be created. @@ -158,6 +176,16 @@ public final Stream.WriteStream createWriteStream( /** * Creates a write stream to the given table. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString();
+   *   Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build();
+   *   Stream.WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+   * }
+   * }
+ * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @param writeStream Required. Stream to be created. @@ -176,6 +204,18 @@ public final Stream.WriteStream createWriteStream(String parent, Stream.WriteStr /** * Creates a write stream to the given table. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.CreateWriteStreamRequest request =
+   *       Storage.CreateWriteStreamRequest.newBuilder()
+   *           .setParent(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString())
+   *           .build();
+   *   Stream.WriteStream response = bigQueryWriteClient.createWriteStream(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -188,6 +228,19 @@ public final Stream.WriteStream createWriteStream(Storage.CreateWriteStreamReque * Creates a write stream to the given table. * *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.CreateWriteStreamRequest request =
+   *       Storage.CreateWriteStreamRequest.newBuilder()
+   *           .setParent(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString())
+   *           .build();
+   *   ApiFuture<Stream.WriteStream> future =
+   *       bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
+   *   // Do something.
+   *   Stream.WriteStream response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createWriteStreamCallable() { @@ -214,6 +267,24 @@ public final Stream.WriteStream createWriteStream(Storage.CreateWriteStreamReque * the stream is committed. * *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   BidiStream<Storage.AppendRowsRequest, Storage.AppendRowsResponse> bidiStream =
+   *       bigQueryWriteClient.appendRowsCallable().call();
+   *   Storage.AppendRowsRequest request =
+   *       Storage.AppendRowsRequest.newBuilder()
+   *           .setWriteStream(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .setOffset(Int64Value.newBuilder().build())
+   *           .setIgnoreUnknownFields(true)
+   *           .build();
+   *   bidiStream.send(request);
+   *   for (Storage.AppendRowsResponse response : bidiStream) {
+   *     // Do something when a response is received.
+   *   }
+   * }
+   * }
*/ public final BidiStreamingCallable appendRowsCallable() { @@ -224,6 +295,15 @@ public final Stream.WriteStream createWriteStream(Storage.CreateWriteStreamReque /** * Gets a write stream. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   Stream.WriteStream response = bigQueryWriteClient.getWriteStream(name);
+   * }
+   * }
+ * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -240,6 +320,15 @@ public final Stream.WriteStream getWriteStream(WriteStreamName name) { /** * Gets a write stream. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString();
+   *   Stream.WriteStream response = bigQueryWriteClient.getWriteStream(name);
+   * }
+   * }
+ * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -254,6 +343,19 @@ public final Stream.WriteStream getWriteStream(String name) { /** * Gets a write stream. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.GetWriteStreamRequest request =
+   *       Storage.GetWriteStreamRequest.newBuilder()
+   *           .setName(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .build();
+   *   Stream.WriteStream response = bigQueryWriteClient.getWriteStream(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -266,6 +368,20 @@ public final Stream.WriteStream getWriteStream(Storage.GetWriteStreamRequest req * Gets a write stream. * *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.GetWriteStreamRequest request =
+   *       Storage.GetWriteStreamRequest.newBuilder()
+   *           .setName(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .build();
+   *   ApiFuture<Stream.WriteStream> future =
+   *       bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
+   *   // Do something.
+   *   Stream.WriteStream response = future.get();
+   * }
+   * }
*/ public final UnaryCallable getWriteStreamCallable() { @@ -276,6 +392,15 @@ public final Stream.WriteStream getWriteStream(Storage.GetWriteStreamRequest req /** * Finalize a write stream so that no new data can be appended to the stream. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   Storage.FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
+   * }
+   * }
+ * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -292,6 +417,15 @@ public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(WriteStream /** * Finalize a write stream so that no new data can be appended to the stream. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString();
+   *   Storage.FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
+   * }
+   * }
+ * * @param name Required. Name of the stream to finalize, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -306,6 +440,20 @@ public final Storage.FinalizeWriteStreamResponse finalizeWriteStream(String name /** * Finalize a write stream so that no new data can be appended to the stream. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.FinalizeWriteStreamRequest request =
+   *       Storage.FinalizeWriteStreamRequest.newBuilder()
+   *           .setName(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .build();
+   *   Storage.FinalizeWriteStreamResponse response =
+   *       bigQueryWriteClient.finalizeWriteStream(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -319,6 +467,20 @@ public final Storage.FinalizeWriteStreamResponse finalizeWriteStream( * Finalize a write stream so that no new data can be appended to the stream. * *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.FinalizeWriteStreamRequest request =
+   *       Storage.FinalizeWriteStreamRequest.newBuilder()
+   *           .setName(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .build();
+   *   ApiFuture<Storage.FinalizeWriteStreamResponse> future =
+   *       bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
+   *   // Do something.
+   *   Storage.FinalizeWriteStreamResponse response = future.get();
+   * }
+   * }
*/ public final UnaryCallable< Storage.FinalizeWriteStreamRequest, Storage.FinalizeWriteStreamResponse> @@ -332,6 +494,16 @@ public final Storage.FinalizeWriteStreamResponse finalizeWriteStream( * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   Storage.BatchCommitWriteStreamsResponse response =
+   *       bigQueryWriteClient.batchCommitWriteStreams(parent);
+   * }
+   * }
+ * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -350,6 +522,16 @@ public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(Tab * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString();
+   *   Storage.BatchCommitWriteStreamsResponse response =
+   *       bigQueryWriteClient.batchCommitWriteStreams(parent);
+   * }
+   * }
+ * * @param parent Required. Parent table that all the streams should belong to, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -366,6 +548,20 @@ public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams(Str * must be finalized before commit and cannot be committed multiple times. Once a stream is * committed, data in the stream becomes available for read operations. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.BatchCommitWriteStreamsRequest request =
+   *       Storage.BatchCommitWriteStreamsRequest.newBuilder()
+   *           .setParent(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString())
+   *           .addAllWriteStreams(new ArrayList<String>())
+   *           .build();
+   *   Storage.BatchCommitWriteStreamsResponse response =
+   *       bigQueryWriteClient.batchCommitWriteStreams(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -381,6 +577,20 @@ public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams( * committed, data in the stream becomes available for read operations. * *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.BatchCommitWriteStreamsRequest request =
+   *       Storage.BatchCommitWriteStreamsRequest.newBuilder()
+   *           .setParent(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString())
+   *           .addAllWriteStreams(new ArrayList<String>())
+   *           .build();
+   *   ApiFuture<Storage.BatchCommitWriteStreamsResponse> future =
+   *       bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
+   *   // Do something.
+   *   Storage.BatchCommitWriteStreamsResponse response = future.get();
+   * }
+   * }
*/ public final UnaryCallable< Storage.BatchCommitWriteStreamsRequest, Storage.BatchCommitWriteStreamsResponse> @@ -395,6 +605,16 @@ public final Storage.BatchCommitWriteStreamsResponse batchCommitWriteStreams( * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream =
+   *       WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   Storage.FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
+   * }
+   * }
+ * * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -413,6 +633,16 @@ public final Storage.FlushRowsResponse flushRows(WriteStreamName writeStream) { * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String writeStream =
+   *       WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString();
+   *   Storage.FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
+   * }
+   * }
+ * * @param writeStream Required. The stream that is the target of the flush operation. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -429,6 +659,20 @@ public final Storage.FlushRowsResponse flushRows(String writeStream) { * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in * the request. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.FlushRowsRequest request =
+   *       Storage.FlushRowsRequest.newBuilder()
+   *           .setWriteStream(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .setOffset(-1019779949)
+   *           .build();
+   *   Storage.FlushRowsResponse response = bigQueryWriteClient.flushRows(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -444,6 +688,21 @@ public final Storage.FlushRowsResponse flushRows(Storage.FlushRowsRequest reques * the request. * *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   Storage.FlushRowsRequest request =
+   *       Storage.FlushRowsRequest.newBuilder()
+   *           .setWriteStream(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .setOffset(-1019779949)
+   *           .build();
+   *   ApiFuture<Storage.FlushRowsResponse> future =
+   *       bigQueryWriteClient.flushRowsCallable().futureCall(request);
+   *   // Do something.
+   *   Storage.FlushRowsResponse response = future.get();
+   * }
+   * }
   */
  public final UnaryCallable<Storage.FlushRowsRequest, Storage.FlushRowsResponse>
      flushRowsCallable() {
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java
index 561987d3b3..d551efac3d 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java
@@ -24,6 +24,14 @@
 *

The Write API can be used to write data to BigQuery. * *

Sample for BigQueryWriteClient: + * + *

{@code
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ *   Stream.WriteStream writeStream = Stream.WriteStream.newBuilder().build();
+ *   Stream.WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ * }
  */
 @Generated("by gapic-generator-java")
 package com.google.cloud.bigquery.storage.v1alpha2;
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java
index 7409ee28ff..b2595cada9 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java
@@ -36,6 +36,17 @@
 *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+ *   TableReferenceProto.TableReference tableReference =
+ *       TableReferenceProto.TableReference.newBuilder().build();
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   int requestedStreams = 1017221410;
+ *   Storage.ReadSession response =
+ *       baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
+ * }
+ * }
+ * *

Note: close() needs to be called on the BaseBigQueryStorageClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). @@ -152,6 +163,19 @@ public BigQueryStorageStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   TableReferenceProto.TableReference tableReference =
+   *       TableReferenceProto.TableReference.newBuilder().build();
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   int requestedStreams = 1017221410;
+   *   Storage.ReadSession response =
+   *       baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
+   * }
+   * }
+ * * @param tableReference Required. Reference to the table to read. * @param parent Required. String of the form `projects/{project_id}` indicating the project this * ReadSession is associated with. This is the project that will be billed for usage. @@ -186,6 +210,19 @@ public final Storage.ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   TableReferenceProto.TableReference tableReference =
+   *       TableReferenceProto.TableReference.newBuilder().build();
+   *   String parent = ProjectName.of("[PROJECT]").toString();
+   *   int requestedStreams = 1017221410;
+   *   Storage.ReadSession response =
+   *       baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
+   * }
+   * }
+ * * @param tableReference Required. Reference to the table to read. * @param parent Required. String of the form `projects/{project_id}` indicating the project this * ReadSession is associated with. This is the project that will be billed for usage. @@ -220,6 +257,19 @@ public final Storage.ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.CreateReadSessionRequest request =
+   *       Storage.CreateReadSessionRequest.newBuilder()
+   *           .setParent(ProjectName.of("[PROJECT]").toString())
+   *           .setRequestedStreams(1017221410)
+   *           .build();
+   *   Storage.ReadSession response = baseBigQueryStorageClient.createReadSession(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -241,6 +291,20 @@ public final Storage.ReadSession createReadSession(Storage.CreateReadSessionRequ * clean-up by the caller. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.CreateReadSessionRequest request =
+   *       Storage.CreateReadSessionRequest.newBuilder()
+   *           .setParent(ProjectName.of("[PROJECT]").toString())
+   *           .setRequestedStreams(1017221410)
+   *           .build();
+   *   ApiFuture<Storage.ReadSession> future =
+   *       baseBigQueryStorageClient.createReadSessionCallable().futureCall(request);
+   *   // Do something.
+   *   Storage.ReadSession response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createReadSessionCallable() { @@ -259,6 +323,17 @@ public final Storage.ReadSession createReadSession(Storage.CreateReadSessionRequ * data. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.ReadRowsRequest request = Storage.ReadRowsRequest.newBuilder().build();
+   *   ServerStream<Storage.ReadRowsResponse> stream =
+   *       baseBigQueryStorageClient.readRowsCallable().call(request);
+   *   for (Storage.ReadRowsResponse response : stream) {
+   *     // Do something when a response is received.
+   *   }
+   * }
+   * }
*/ public final ServerStreamingCallable readRowsCallable() { @@ -270,6 +345,17 @@ public final Storage.ReadSession createReadSession(Storage.CreateReadSessionRequ * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.ReadSession session = Storage.ReadSession.newBuilder().build();
+   *   int requestedStreams = 1017221410;
+   *   Storage.BatchCreateReadSessionStreamsResponse response =
+   *       baseBigQueryStorageClient.batchCreateReadSessionStreams(session, requestedStreams);
+   * }
+   * }
+ * * @param session Required. Must be a non-expired session obtained from a call to * CreateReadSession. Only the name field needs to be set. * @param requestedStreams Required. Number of new streams requested. Must be positive. Number of @@ -291,6 +377,19 @@ public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessio * Creates additional streams for a ReadSession. This API can be used to dynamically adjust the * parallelism of a batch processing task upwards by adding additional workers. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.BatchCreateReadSessionStreamsRequest request =
+   *       Storage.BatchCreateReadSessionStreamsRequest.newBuilder()
+   *           .setRequestedStreams(1017221410)
+   *           .build();
+   *   Storage.BatchCreateReadSessionStreamsResponse response =
+   *       baseBigQueryStorageClient.batchCreateReadSessionStreams(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -305,6 +404,19 @@ public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessio * parallelism of a batch processing task upwards by adding additional workers. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.BatchCreateReadSessionStreamsRequest request =
+   *       Storage.BatchCreateReadSessionStreamsRequest.newBuilder()
+   *           .setRequestedStreams(1017221410)
+   *           .build();
+   *   ApiFuture<Storage.BatchCreateReadSessionStreamsResponse> future =
+   *       baseBigQueryStorageClient.batchCreateReadSessionStreamsCallable().futureCall(request);
+   *   // Do something.
+   *   Storage.BatchCreateReadSessionStreamsResponse response = future.get();
+   * }
+   * }
*/ public final UnaryCallable< Storage.BatchCreateReadSessionStreamsRequest, @@ -327,6 +439,15 @@ public final Storage.BatchCreateReadSessionStreamsResponse batchCreateReadSessio *

This method will return an error if there are no other live streams in the Session, or if * SplitReadStream() has been called on the given Stream. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.Stream stream = Storage.Stream.newBuilder().build();
+   *   baseBigQueryStorageClient.finalizeStream(stream);
+   * }
+   * }
+ * * @param stream Required. Stream to finalize. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -350,6 +471,15 @@ public final void finalizeStream(Storage.Stream stream) { *

This method will return an error if there are no other live streams in the Session, or if * SplitReadStream() has been called on the given Stream. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.FinalizeStreamRequest request = Storage.FinalizeStreamRequest.newBuilder().build();
+   *   baseBigQueryStorageClient.finalizeStream(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -372,6 +502,16 @@ public final void finalizeStream(Storage.FinalizeStreamRequest request) { * SplitReadStream() has been called on the given Stream. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.FinalizeStreamRequest request = Storage.FinalizeStreamRequest.newBuilder().build();
+   *   ApiFuture<Empty> future =
+   *       baseBigQueryStorageClient.finalizeStreamCallable().futureCall(request);
+   *   // Do something.
+   *   future.get();
+   * }
+   * }
*/ public final UnaryCallable finalizeStreamCallable() { return stub.finalizeStreamCallable(); @@ -391,6 +531,16 @@ public final UnaryCallable finalizeStreamC * *

This method is guaranteed to be idempotent. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.Stream originalStream = Storage.Stream.newBuilder().build();
+   *   Storage.SplitReadStreamResponse response =
+   *       baseBigQueryStorageClient.splitReadStream(originalStream);
+   * }
+   * }
+ * * @param originalStream Required. Stream to split. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -414,6 +564,16 @@ public final Storage.SplitReadStreamResponse splitReadStream(Storage.Stream orig * *

This method is guaranteed to be idempotent. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.SplitReadStreamRequest request =
+   *       Storage.SplitReadStreamRequest.newBuilder().setFraction(-1653751294).build();
+   *   Storage.SplitReadStreamResponse response = baseBigQueryStorageClient.splitReadStream(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -437,6 +597,17 @@ public final Storage.SplitReadStreamResponse splitReadStream( *

This method is guaranteed to be idempotent. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+   *   Storage.SplitReadStreamRequest request =
+   *       Storage.SplitReadStreamRequest.newBuilder().setFraction(-1653751294).build();
+   *   ApiFuture<Storage.SplitReadStreamResponse> future =
+   *       baseBigQueryStorageClient.splitReadStreamCallable().futureCall(request);
+   *   // Do something.
+   *   Storage.SplitReadStreamResponse response = future.get();
+   * }
+   * }
   */
  public final UnaryCallable<Storage.SplitReadStreamRequest, Storage.SplitReadStreamResponse>
      splitReadStreamCallable() {
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java
index 00d84fd6a9..fef267f88e 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/package-info.java
@@ -24,6 +24,17 @@
 *

The BigQuery storage API can be used to read data stored in BigQuery. * *

Sample for BaseBigQueryStorageClient: + * + *

{@code
+ * try (BaseBigQueryStorageClient baseBigQueryStorageClient = BaseBigQueryStorageClient.create()) {
+ *   TableReferenceProto.TableReference tableReference =
+ *       TableReferenceProto.TableReference.newBuilder().build();
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   int requestedStreams = 1017221410;
+ *   Storage.ReadSession response =
+ *       baseBigQueryStorageClient.createReadSession(tableReference, parent, requestedStreams);
+ * }
+ * }
  */
 @Generated("by gapic-generator-java")
 package com.google.cloud.bigquery.storage.v1beta1;
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java
index 5225c25b98..2fd2a9ffa6 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java
@@ -62,18 +62,19 @@
 *

For example, to set the total timeout of createReadSession to 30 seconds: * *

{@code
- * BigQueryStorageStubSettings.Builder bigQueryStorageSettingsBuilder =
+ * BigQueryStorageStubSettings.Builder baseBigQueryStorageSettingsBuilder =
  *     BigQueryStorageStubSettings.newBuilder();
- * bigQueryStorageSettingsBuilder
+ * baseBigQueryStorageSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         bigQueryStorageSettingsBuilder
+ *         baseBigQueryStorageSettingsBuilder
  *             .createReadSessionSettings()
  *             .getRetrySettings()
  *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryStorageStubSettings bigQueryStorageSettings = bigQueryStorageSettingsBuilder.build();
+ * BigQueryStorageStubSettings baseBigQueryStorageSettings =
+ *     baseBigQueryStorageSettingsBuilder.build();
  * }
  */
 @BetaApi
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java
index 41d9b173d1..6970e4e7b2 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java
@@ -38,6 +38,16 @@
 *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   ReadSession readSession = ReadSession.newBuilder().build();
+ *   int maxStreamCount = 940837515;
+ *   ReadSession response =
+ *       baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+ * }
+ * }
+ * *

Note: close() needs to be called on the BaseBigQueryReadClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). @@ -160,6 +170,18 @@ public BigQueryReadStub getStub() { *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ProjectName parent = ProjectName.of("[PROJECT]");
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   int maxStreamCount = 940837515;
+   *   ReadSession response =
+   *       baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+   * }
+   * }
+ * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. * @param readSession Required. Session to be created. @@ -201,6 +223,18 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   String parent = ProjectName.of("[PROJECT]").toString();
+   *   ReadSession readSession = ReadSession.newBuilder().build();
+   *   int maxStreamCount = 940837515;
+   *   ReadSession response =
+   *       baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+   * }
+   * }
+ * * @param parent Required. The request project that owns the session, in the form of * `projects/{project_id}`. * @param readSession Required. Session to be created. @@ -242,6 +276,20 @@ public final ReadSession createReadSession( *

Read sessions automatically expire 24 hours after they are created and do not require manual * clean-up by the caller. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   CreateReadSessionRequest request =
+   *       CreateReadSessionRequest.newBuilder()
+   *           .setParent(ProjectName.of("[PROJECT]").toString())
+   *           .setReadSession(ReadSession.newBuilder().build())
+   *           .setMaxStreamCount(940837515)
+   *           .build();
+   *   ReadSession response = baseBigQueryReadClient.createReadSession(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -269,6 +317,21 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) { * clean-up by the caller. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   CreateReadSessionRequest request =
+   *       CreateReadSessionRequest.newBuilder()
+   *           .setParent(ProjectName.of("[PROJECT]").toString())
+   *           .setReadSession(ReadSession.newBuilder().build())
+   *           .setMaxStreamCount(940837515)
+   *           .build();
+   *   ApiFuture<ReadSession> future =
+   *       baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
+   *   // Do something.
+   *   ReadSession response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createReadSessionCallable() { return stub.createReadSessionCallable(); @@ -284,6 +347,22 @@ public final UnaryCallable createReadSess * stream. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   ReadRowsRequest request =
+   *       ReadRowsRequest.newBuilder()
+   *           .setReadStream(
+   *               ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString())
+   *           .setOffset(-1019779949)
+   *           .build();
+   *   ServerStream<ReadRowsResponse> stream =
+   *       baseBigQueryReadClient.readRowsCallable().call(request);
+   *   for (ReadRowsResponse response : stream) {
+   *     // Do something when a response is received.
+   *   }
+   * }
+   * }
*/ public final ServerStreamingCallable readRowsCallable() { return stub.readRowsCallable(); @@ -302,6 +381,20 @@ public final ServerStreamingCallable readRows * original[0-j] = primary[0-j] and original[j-n] = residual[0-m] once the streams have been read * to completion. * + *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   SplitReadStreamRequest request =
+   *       SplitReadStreamRequest.newBuilder()
+   *           .setName(
+   *               ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString())
+   *           .setFraction(-1653751294)
+   *           .build();
+   *   SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -323,6 +416,21 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ * to completion. * *

Sample code: + * + *

{@code
+   * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+   *   SplitReadStreamRequest request =
+   *       SplitReadStreamRequest.newBuilder()
+   *           .setName(
+   *               ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]").toString())
+   *           .setFraction(-1653751294)
+   *           .build();
+   *   ApiFuture<SplitReadStreamResponse> future =
+   *       baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
+   *   // Do something.
+   *   SplitReadStreamResponse response = future.get();
+   * }
+   * }
   */
  public final UnaryCallable<SplitReadStreamRequest, SplitReadStreamResponse>
      splitReadStreamCallable() {
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java
index a2e5cd8ef6..db904ff13b 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BigQueryWriteClient.java
@@ -35,6 +35,14 @@
 *

This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * + *

{@code
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ *   WriteStream writeStream = WriteStream.newBuilder().build();
+ *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ * }
+ * *

Note: close() needs to be called on the BigQueryWriteClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). * @@ -143,6 +151,16 @@ public BigQueryWriteStub getStub() { * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+   * }
+   * }
+ * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @param writeStream Required. Stream to be created. @@ -165,6 +183,16 @@ public final WriteStream createWriteStream(TableName parent, WriteStream writeSt * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString();
+   *   WriteStream writeStream = WriteStream.newBuilder().build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+   * }
+   * }
+ * * @param parent Required. Reference to the table to which the stream belongs, in the format of * `projects/{project}/datasets/{dataset}/tables/{table}`. * @param writeStream Required. Stream to be created. @@ -184,6 +212,19 @@ public final WriteStream createWriteStream(String parent, WriteStream writeStrea * clients. Data written to this stream is considered committed as soon as an acknowledgement is * received. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   CreateWriteStreamRequest request =
+   *       CreateWriteStreamRequest.newBuilder()
+   *           .setParent(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString())
+   *           .setWriteStream(WriteStream.newBuilder().build())
+   *           .build();
+   *   WriteStream response = bigQueryWriteClient.createWriteStream(request);
+   * }
+   * }
+ * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ @@ -200,6 +241,20 @@ public final WriteStream createWriteStream(CreateWriteStreamRequest request) { * received. * *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   CreateWriteStreamRequest request =
+   *       CreateWriteStreamRequest.newBuilder()
+   *           .setParent(TableName.of("[PROJECT]", "[DATASET]", "[TABLE]").toString())
+   *           .setWriteStream(WriteStream.newBuilder().build())
+   *           .build();
+   *   ApiFuture<WriteStream> future =
+   *       bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
+   *   // Do something.
+   *   WriteStream response = future.get();
+   * }
+   * }
*/ public final UnaryCallable createWriteStreamCallable() { return stub.createWriteStreamCallable(); @@ -225,6 +280,24 @@ public final UnaryCallable createWriteStr * the stream is committed. * *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
+   *       bigQueryWriteClient.appendRowsCallable().call();
+   *   AppendRowsRequest request =
+   *       AppendRowsRequest.newBuilder()
+   *           .setWriteStream(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .setOffset(Int64Value.newBuilder().build())
+   *           .setTraceId("traceId-1067401920")
+   *           .build();
+   *   bidiStream.send(request);
+   *   for (AppendRowsResponse response : bidiStream) {
+   *     // Do something when a response is received.
+   *   }
+   * }
+   * }
*/ public final BidiStreamingCallable appendRowsCallable() { return stub.appendRowsCallable(); @@ -234,6 +307,15 @@ public final BidiStreamingCallable append /** * Gets a write stream. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(name);
+   * }
+   * }
+ * * @param name Required. Name of the stream to get, in the form of * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`. * @throws com.google.api.gax.rpc.ApiException if the remote call fails @@ -248,6 +330,15 @@ public final WriteStream getWriteStream(WriteStreamName name) { /** * Gets a write stream. * + *

Sample code: + * + *

{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString();
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(name);
+   * }
+   * }</pre>
+   *
   * @param name Required. Name of the stream to get, in the form of
   *     `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
@@ -261,6 +352,19 @@ public final WriteStream getWriteStream(String name) {
  /**
   * Gets a write stream.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   GetWriteStreamRequest request =
+   *       GetWriteStreamRequest.newBuilder()
+   *           .setName(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .build();
+   *   WriteStream response = bigQueryWriteClient.getWriteStream(request);
+   * }
+   * }</pre>
+   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
@@ -273,6 +377,20 @@ public final WriteStream getWriteStream(GetWriteStreamRequest request) {
   * Gets a write stream.
   *
   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   GetWriteStreamRequest request =
+   *       GetWriteStreamRequest.newBuilder()
+   *           .setName(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .build();
+   *   ApiFuture<WriteStream> future =
+   *       bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
+   *   // Do something.
+   *   WriteStream response = future.get();
+   * }
+   * }</pre>
   */
  public final UnaryCallable<GetWriteStreamRequest, WriteStream> getWriteStreamCallable() {
    return stub.getWriteStreamCallable();
@@ -283,6 +401,15 @@ public final UnaryCallable<GetWriteStreamRequest, WriteStream> getWriteStreamCal
   * Finalize a write stream so that no new data can be appended to the stream. Finalize is not
   * supported on the '_default' stream.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
+   * }
+   * }</pre>
+   *
   * @param name Required. Name of the stream to finalize, in the form of
   *     `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
@@ -300,6 +427,15 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName nam
   * Finalize a write stream so that no new data can be appended to the stream. Finalize is not
   * supported on the '_default' stream.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString();
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
+   * }
+   * }</pre>
+   *
   * @param name Required. Name of the stream to finalize, in the form of
   *     `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
@@ -315,6 +451,19 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(String name) {
   * Finalize a write stream so that no new data can be appended to the stream. Finalize is not
   * supported on the '_default' stream.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   FinalizeWriteStreamRequest request =
+   *       FinalizeWriteStreamRequest.newBuilder()
+   *           .setName(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .build();
+   *   FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(request);
+   * }
+   * }</pre>
+   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
@@ -328,6 +477,20 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStream
   * supported on the '_default' stream.
   *
   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   FinalizeWriteStreamRequest request =
+   *       FinalizeWriteStreamRequest.newBuilder()
+   *           .setName(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .build();
+   *   ApiFuture<FinalizeWriteStreamResponse> future =
+   *       bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
+   *   // Do something.
+   *   FinalizeWriteStreamResponse response = future.get();
+   * }
+   * }</pre>
   */
  public final UnaryCallable<FinalizeWriteStreamRequest, FinalizeWriteStreamResponse>
      finalizeWriteStreamCallable() {
@@ -340,6 +503,16 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStream
   * must be finalized before commit and cannot be committed multiple times. Once a stream is
   * committed, data in the stream becomes available for read operations.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String parent = "parent-995424086";
+   *   BatchCommitWriteStreamsResponse response =
+   *       bigQueryWriteClient.batchCommitWriteStreams(parent);
+   * }
+   * }</pre>
+   *
   * @param parent Required. Parent table that all the streams should belong to, in the form of
   *     `projects/{project}/datasets/{dataset}/tables/{table}`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
@@ -356,6 +529,20 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(String pare
   * must be finalized before commit and cannot be committed multiple times. Once a stream is
   * committed, data in the stream becomes available for read operations.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   BatchCommitWriteStreamsRequest request =
+   *       BatchCommitWriteStreamsRequest.newBuilder()
+   *           .setParent("parent-995424086")
+   *           .addAllWriteStreams(new ArrayList<String>())
+   *           .build();
+   *   BatchCommitWriteStreamsResponse response =
+   *       bigQueryWriteClient.batchCommitWriteStreams(request);
+   * }
+   * }</pre>
+   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
@@ -371,6 +558,20 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(
   * committed, data in the stream becomes available for read operations.
   *
   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   BatchCommitWriteStreamsRequest request =
+   *       BatchCommitWriteStreamsRequest.newBuilder()
+   *           .setParent("parent-995424086")
+   *           .addAllWriteStreams(new ArrayList<String>())
+   *           .build();
+   *   ApiFuture<BatchCommitWriteStreamsResponse> future =
+   *       bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
+   *   // Do something.
+   *   BatchCommitWriteStreamsResponse response = future.get();
+   * }
+   * }</pre>
   */
  public final UnaryCallable<BatchCommitWriteStreamsRequest, BatchCommitWriteStreamsResponse>
      batchCommitWriteStreamsCallable() {
@@ -384,6 +585,16 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(
   * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in
   * the request. Flush is not supported on the _default stream, since it is not BUFFERED.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   WriteStreamName writeStream =
+   *       WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
+   * }
+   * }</pre>
+   *
   * @param writeStream Required. The stream that is the target of the flush operation.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
@@ -402,6 +613,16 @@ public final FlushRowsResponse flushRows(WriteStreamName writeStream) {
   * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in
   * the request. Flush is not supported on the _default stream, since it is not BUFFERED.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   String writeStream =
+   *       WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString();
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(writeStream);
+   * }
+   * }</pre>
+   *
   * @param writeStream Required. The stream that is the target of the flush operation.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
@@ -417,6 +638,20 @@ public final FlushRowsResponse flushRows(String writeStream) {
   * flushes up to any previously flushed offset in a BUFFERED stream, to the offset specified in
   * the request. Flush is not supported on the _default stream, since it is not BUFFERED.
   *
+   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   FlushRowsRequest request =
+   *       FlushRowsRequest.newBuilder()
+   *           .setWriteStream(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .setOffset(Int64Value.newBuilder().build())
+   *           .build();
+   *   FlushRowsResponse response = bigQueryWriteClient.flushRows(request);
+   * }
+   * }</pre>
+   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
@@ -432,6 +667,21 @@ public final FlushRowsResponse flushRows(FlushRowsRequest request) {
   * the request. Flush is not supported on the _default stream, since it is not BUFFERED.
   *
   * <p>Sample code:
+   *
+   * <pre>{@code
+   * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+   *   FlushRowsRequest request =
+   *       FlushRowsRequest.newBuilder()
+   *           .setWriteStream(
+   *               WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]").toString())
+   *           .setOffset(Int64Value.newBuilder().build())
+   *           .build();
+   *   ApiFuture<FlushRowsResponse> future =
+   *       bigQueryWriteClient.flushRowsCallable().futureCall(request);
+   *   // Do something.
+   *   FlushRowsResponse response = future.get();
+   * }
+   * }</pre>
   */
  public final UnaryCallable<FlushRowsRequest, FlushRowsResponse> flushRowsCallable() {
    return stub.flushRowsCallable();
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java
index 39429df6bf..ae7a37d17b 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java
@@ -28,6 +28,16 @@
 *
 * <p>Sample for BaseBigQueryReadClient:
 *
+ * <pre>{@code
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ *   ProjectName parent = ProjectName.of("[PROJECT]");
+ *   ReadSession readSession = ReadSession.newBuilder().build();
+ *   int maxStreamCount = 940837515;
+ *   ReadSession response =
+ *       baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
+ * }
+ * }</pre>
+ *
 * <p>======================= BigQueryWriteClient =======================
 *
 * <p>Service Description: BigQuery Write API.
@@ -35,6 +45,14 @@
 * <p>The Write API can be used to write data to BigQuery.
 *
 * <p>Sample for BigQueryWriteClient:
+ *
+ * <pre>{@code
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ *   TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ *   WriteStream writeStream = WriteStream.newBuilder().build();
+ *   WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ * }</pre>
 */
@Generated("by gapic-generator-java")
package com.google.cloud.bigquery.storage.v1beta2;
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java
index bb79df916a..3b230461c1 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java
@@ -66,18 +66,18 @@
  * <p>For example, to set the total timeout of createReadSession to 30 seconds:
  *
  * <pre>{@code
- * BigQueryReadStubSettings.Builder bigQueryReadSettingsBuilder =
+ * BigQueryReadStubSettings.Builder baseBigQueryReadSettingsBuilder =
  *     BigQueryReadStubSettings.newBuilder();
- * bigQueryReadSettingsBuilder
+ * baseBigQueryReadSettingsBuilder
  *     .createReadSessionSettings()
  *     .setRetrySettings(
- *         bigQueryReadSettingsBuilder
+ *         baseBigQueryReadSettingsBuilder
  *             .createReadSessionSettings()
  *             .getRetrySettings()
  *             .toBuilder()
  *             .setTotalTimeout(Duration.ofSeconds(30))
  *             .build());
- * BigQueryReadStubSettings bigQueryReadSettings = bigQueryReadSettingsBuilder.build();
+ * BigQueryReadStubSettings baseBigQueryReadSettings = baseBigQueryReadSettingsBuilder.build();
  * }</pre>
  */
 @BetaApi
diff --git a/synth.metadata b/synth.metadata
index e5ffac6394..44e99f52d3 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -11,39 +11,39 @@
       "git": {
         "name": ".",
         "remote": "https://github.com/googleapis/java-bigquerystorage.git",
-        "sha": "4961a7c4d0a696af972e6adc4f007358d6e935c5"
+        "sha": "ba4b1a2d8eaa8f5408e476a3b570cb508aa94f57"
       }
     },
     {
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "91e206bcfeaf8948ea03fe3cb1b7616108496cd3",
-        "internalRef": "350949863"
+        "sha": "8d8c008e56f1af31d57f75561e0f1848ffb29eeb",
+        "internalRef": "356341083"
       }
     },
     {
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "91e206bcfeaf8948ea03fe3cb1b7616108496cd3",
-        "internalRef": "350949863"
+        "sha": "8d8c008e56f1af31d57f75561e0f1848ffb29eeb",
+        "internalRef": "356341083"
       }
     },
     {
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "91e206bcfeaf8948ea03fe3cb1b7616108496cd3",
-        "internalRef": "350949863"
+        "sha": "8d8c008e56f1af31d57f75561e0f1848ffb29eeb",
+        "internalRef": "356341083"
       }
     },
     {
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "91e206bcfeaf8948ea03fe3cb1b7616108496cd3",
-        "internalRef": "350949863"
+        "sha": "8d8c008e56f1af31d57f75561e0f1848ffb29eeb",
+        "internalRef": "356341083"
       }
     },
     {
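Taken together, the BigQueryWriteClient methods patched above make up the pending-stream write path: create a PENDING stream, append rows over the bidi callable, finalize the stream, and commit it so its rows become readable. A minimal end-to-end sketch against the v1beta2 surface follows; the PendingStreamFlow class name and the project, dataset, and table identifiers are placeholders, and the row payload (ProtoRows plus the writer schema) is left out since it is independent of the calls shown here.

import com.google.api.gax.rpc.BidiStream;
import com.google.cloud.bigquery.storage.v1beta2.AppendRowsRequest;
import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsRequest;
import com.google.cloud.bigquery.storage.v1beta2.BatchCommitWriteStreamsResponse;
import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1beta2.FinalizeWriteStreamResponse;
import com.google.cloud.bigquery.storage.v1beta2.TableName;
import com.google.cloud.bigquery.storage.v1beta2.WriteStream;

public class PendingStreamFlow {
  public static void main(String[] args) throws Exception {
    // Placeholder identifiers; substitute a real project, dataset, and table.
    TableName table = TableName.of("my-project", "my_dataset", "my_table");

    try (BigQueryWriteClient client = BigQueryWriteClient.create()) {
      // 1. Create a PENDING stream: appended rows stay invisible until the stream is committed.
      WriteStream stream =
          client.createWriteStream(
              table, WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build());

      // 2. Append over the bidi stream. Building ProtoRows and the writer schema is omitted;
      //    only the request/response plumbing is shown.
      BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
          client.appendRowsCallable().call();
      bidiStream.send(AppendRowsRequest.newBuilder().setWriteStream(stream.getName()).build());
      bidiStream.closeSend();
      for (AppendRowsResponse response : bidiStream) {
        // One response per request; inspect getAppendResult() or getError() here.
      }

      // 3. Finalize the stream so no further appends are accepted.
      FinalizeWriteStreamResponse finalized = client.finalizeWriteStream(stream.getName());
      System.out.println("Finalized rows: " + finalized.getRowCount());

      // 4. Commit the finalized stream; its rows become visible to readers atomically.
      BatchCommitWriteStreamsResponse committed =
          client.batchCommitWriteStreams(
              BatchCommitWriteStreamsRequest.newBuilder()
                  .setParent(table.toString())
                  .addWriteStreams(stream.getName())
                  .build());
      System.out.println("Committed at: " + committed.getCommitTime());
    }
  }
}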
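The flushRows samples above apply to a BUFFERED stream, where appended rows are held back until they are flushed up to an explicit offset. A small sketch under the same placeholder names; the appends themselves are elided:

import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1beta2.FlushRowsRequest;
import com.google.cloud.bigquery.storage.v1beta2.FlushRowsResponse;
import com.google.cloud.bigquery.storage.v1beta2.TableName;
import com.google.cloud.bigquery.storage.v1beta2.WriteStream;
import com.google.protobuf.Int64Value;

public class BufferedStreamFlush {
  public static void main(String[] args) throws Exception {
    // Placeholder identifiers; substitute a real project, dataset, and table.
    TableName table = TableName.of("my-project", "my_dataset", "my_table");

    try (BigQueryWriteClient client = BigQueryWriteClient.create()) {
      // A BUFFERED stream holds appended rows invisible until they are flushed.
      WriteStream stream =
          client.createWriteStream(
              table, WriteStream.newBuilder().setType(WriteStream.Type.BUFFERED).build());

      // ... append rows via appendRowsCallable() as in the samples above ...

      // Make everything up to and including offset 9 visible to readers.
      FlushRowsResponse response =
          client.flushRows(
              FlushRowsRequest.newBuilder()
                  .setWriteStream(stream.getName())
                  .setOffset(Int64Value.of(9))
                  .build());
      System.out.println("Flushed up to offset " + response.getOffset());
    }
  }
}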
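The BigQueryReadStubSettings snippet above overrides the createReadSession timeout at the stub level; the same override can also be expressed through the client-level settings builder. A sketch, assuming the v1beta2 BaseBigQueryReadSettings class that accompanies BaseBigQueryReadClient:

import com.google.cloud.bigquery.storage.v1beta2.BaseBigQueryReadClient;
import com.google.cloud.bigquery.storage.v1beta2.BaseBigQueryReadSettings;
import org.threeten.bp.Duration;

public class ReadSessionTimeout {
  public static void main(String[] args) throws Exception {
    BaseBigQueryReadSettings.Builder settingsBuilder = BaseBigQueryReadSettings.newBuilder();
    // Same 30-second total timeout as the stub-settings example, applied via client settings.
    settingsBuilder
        .createReadSessionSettings()
        .setRetrySettings(
            settingsBuilder
                .createReadSessionSettings()
                .getRetrySettings()
                .toBuilder()
                .setTotalTimeout(Duration.ofSeconds(30))
                .build());
    try (BaseBigQueryReadClient client = BaseBigQueryReadClient.create(settingsBuilder.build())) {
      // Use the client as in the createReadSession samples.
    }
  }
}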