feat: add BigQuery Storage Write API v1 #301

Merged
merged 2 commits on Sep 17, 2021
6 changes: 6 additions & 0 deletions docs/bigquery_storage_v1/big_query_write.rst
@@ -0,0 +1,6 @@
BigQueryWrite
-------------------------------

.. automodule:: google.cloud.bigquery_storage_v1.services.big_query_write
:members:
:inherited-members:
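
For orientation, a minimal sketch (not part of this diff) of constructing the client that the new `automodule` directive documents; application-default credentials are assumed to be configured in the environment:

```python
# Sketch only: instantiate the write client surfaced by the new docs page.
# Assumes application-default credentials are available in the environment.
from google.cloud.bigquery_storage_v1.services.big_query_write import (
    BigQueryWriteClient,
)

client = BigQueryWriteClient()
```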
1 change: 1 addition & 0 deletions docs/bigquery_storage_v1/services.rst
@@ -4,3 +4,4 @@ Services for Google Cloud Bigquery Storage v1 API
:maxdepth: 2

big_query_read
big_query_write
44 changes: 44 additions & 0 deletions google/cloud/bigquery_storage/__init__.py
@@ -15,6 +15,12 @@
#

from google.cloud.bigquery_storage_v1 import BigQueryReadClient
from google.cloud.bigquery_storage_v1.services.big_query_write.client import (
BigQueryWriteClient,
)
from google.cloud.bigquery_storage_v1.services.big_query_write.async_client import (
BigQueryWriteAsyncClient,
)

from google.cloud.bigquery_storage_v1 import gapic_types as types
from google.cloud.bigquery_storage_v1 import __version__
@@ -23,34 +29,72 @@
from google.cloud.bigquery_storage_v1.types.arrow import ArrowSerializationOptions
from google.cloud.bigquery_storage_v1.types.avro import AvroRows
from google.cloud.bigquery_storage_v1.types.avro import AvroSchema
from google.cloud.bigquery_storage_v1.types.protobuf import ProtoRows
from google.cloud.bigquery_storage_v1.types.protobuf import ProtoSchema
from google.cloud.bigquery_storage_v1.types.storage import AppendRowsRequest
from google.cloud.bigquery_storage_v1.types.storage import AppendRowsResponse
from google.cloud.bigquery_storage_v1.types.storage import (
BatchCommitWriteStreamsRequest,
)
from google.cloud.bigquery_storage_v1.types.storage import (
BatchCommitWriteStreamsResponse,
)
from google.cloud.bigquery_storage_v1.types.storage import CreateReadSessionRequest
from google.cloud.bigquery_storage_v1.types.storage import CreateWriteStreamRequest
from google.cloud.bigquery_storage_v1.types.storage import FinalizeWriteStreamRequest
from google.cloud.bigquery_storage_v1.types.storage import FinalizeWriteStreamResponse
from google.cloud.bigquery_storage_v1.types.storage import FlushRowsRequest
from google.cloud.bigquery_storage_v1.types.storage import FlushRowsResponse
from google.cloud.bigquery_storage_v1.types.storage import GetWriteStreamRequest
from google.cloud.bigquery_storage_v1.types.storage import ReadRowsRequest
from google.cloud.bigquery_storage_v1.types.storage import ReadRowsResponse
from google.cloud.bigquery_storage_v1.types.storage import SplitReadStreamRequest
from google.cloud.bigquery_storage_v1.types.storage import SplitReadStreamResponse
from google.cloud.bigquery_storage_v1.types.storage import StorageError
from google.cloud.bigquery_storage_v1.types.storage import StreamStats
from google.cloud.bigquery_storage_v1.types.storage import ThrottleState
from google.cloud.bigquery_storage_v1.types.stream import ReadSession
from google.cloud.bigquery_storage_v1.types.stream import ReadStream
from google.cloud.bigquery_storage_v1.types.stream import WriteStream
from google.cloud.bigquery_storage_v1.types.stream import DataFormat
from google.cloud.bigquery_storage_v1.types.table import TableFieldSchema
from google.cloud.bigquery_storage_v1.types.table import TableSchema

__all__ = (
"BigQueryReadClient",
"BigQueryWriteClient",
"BigQueryWriteAsyncClient",
"__version__",
"types",
"ArrowRecordBatch",
"ArrowSchema",
"ArrowSerializationOptions",
"AvroRows",
"AvroSchema",
"ProtoRows",
"ProtoSchema",
"AppendRowsRequest",
"AppendRowsResponse",
"BatchCommitWriteStreamsRequest",
"BatchCommitWriteStreamsResponse",
"CreateReadSessionRequest",
"CreateWriteStreamRequest",
"FinalizeWriteStreamRequest",
"FinalizeWriteStreamResponse",
"FlushRowsRequest",
"FlushRowsResponse",
"GetWriteStreamRequest",
"ReadRowsRequest",
"ReadRowsResponse",
"SplitReadStreamRequest",
"SplitReadStreamResponse",
"StorageError",
"StreamStats",
"ThrottleState",
"ReadSession",
"ReadStream",
"WriteStream",
"DataFormat",
"TableFieldSchema",
"TableSchema",
)
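
To show what these new exports enable, here is an illustrative sketch of creating a pending write stream via the top-level package; the project, dataset, and table names are placeholders, not values from this PR:

```python
# Illustrative use of the symbols newly exported above; the resource
# path below is a placeholder.
from google.cloud import bigquery_storage

client = bigquery_storage.BigQueryWriteClient()
parent = "projects/my-project/datasets/my_dataset/tables/my_table"

# A PENDING stream buffers rows until the stream is finalized and the
# finalized streams are batch-committed.
stream = client.create_write_stream(
    parent=parent,
    write_stream=bigquery_storage.WriteStream(
        type_=bigquery_storage.WriteStream.Type.PENDING
    ),
)
print(stream.name)
```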
74 changes: 74 additions & 0 deletions google/cloud/bigquery_storage_v1/gapic_metadata.json
@@ -48,6 +48,80 @@
}
}
}
},
"BigQueryWrite": {
"clients": {
"grpc": {
"libraryClient": "BigQueryWriteClient",
"rpcs": {
"AppendRows": {
"methods": [
"append_rows"
]
},
"BatchCommitWriteStreams": {
"methods": [
"batch_commit_write_streams"
]
},
"CreateWriteStream": {
"methods": [
"create_write_stream"
]
},
"FinalizeWriteStream": {
"methods": [
"finalize_write_stream"
]
},
"FlushRows": {
"methods": [
"flush_rows"
]
},
"GetWriteStream": {
"methods": [
"get_write_stream"
]
}
}
},
"grpc-async": {
"libraryClient": "BigQueryWriteAsyncClient",
"rpcs": {
"AppendRows": {
"methods": [
"append_rows"
]
},
"BatchCommitWriteStreams": {
"methods": [
"batch_commit_write_streams"
]
},
"CreateWriteStream": {
"methods": [
"create_write_stream"
]
},
"FinalizeWriteStream": {
"methods": [
"finalize_write_stream"
]
},
"FlushRows": {
"methods": [
"flush_rows"
]
},
"GetWriteStream": {
"methods": [
"get_write_stream"
]
}
}
}
}
}
}
}
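
The metadata above maps each RPC to its generated Python method name, which tooling can consume programmatically. An illustrative lookup, assuming the standard gapic_metadata.json layout with a top-level "services" key (not shown in this truncated hunk):

```python
# Illustrative: resolve a generated method name from gapic_metadata.json.
# The file path and the top-level "services" key are assumptions based on
# the standard GAPIC metadata layout.
import json

with open("google/cloud/bigquery_storage_v1/gapic_metadata.json") as fp:
    metadata = json.load(fp)

rpcs = metadata["services"]["BigQueryWrite"]["clients"]["grpc"]["rpcs"]
print(rpcs["AppendRows"]["methods"])  # expected: ['append_rows']
```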
@@ -197,7 +197,7 @@ async def create_read_session(
number of pre-filtered rows, so some filters can lead to
lopsided assignments.

- Read sessions automatically expire 24 hours after they
+ Read sessions automatically expire 6 hours after they
are created and do not require manual clean-up by the
caller.

@@ -441,17 +441,7 @@ async def split_read_stream(
         # and friendly error handling.
         rpc = gapic_v1.method_async.wrap_method(
             self._client._transport.split_read_stream,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
+            default_timeout=None,
             client_info=DEFAULT_CLIENT_INFO,
         )

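Since this change removes the generated default retry for split_read_stream, a caller who wants the previous behavior can pass an explicit policy per call. A sketch mirroring the deleted defaults; `client` and `stream_name` are placeholders:

```python
# Sketch: opt back into retries for split_read_stream after this change.
# The policy values mirror the defaults deleted above; `client` and
# `stream_name` are placeholders.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

response = client.split_read_stream(
    request={"name": stream_name},
    retry=retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=600.0,
    ),
    timeout=600.0,
)
```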
@@ -418,7 +418,7 @@ def create_read_session(
number of pre-filtered rows, so some filters can lead to
lopsided assignments.

- Read sessions automatically expire 24 hours after they
+ Read sessions automatically expire 6 hours after they
are created and do not require manual clean-up by the
caller.

@@ -189,19 +189,7 @@ def _prep_wrapped_messages(self, client_info):
client_info=client_info,
),
             self.split_read_stream: gapic_v1.method.wrap_method(
-                self.split_read_stream,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
+                self.split_read_stream, default_timeout=None, client_info=client_info,
             ),
}

@@ -253,7 +253,7 @@ def create_read_session(
number of pre-filtered rows, so some filters can lead to
lopsided assignments.

- Read sessions automatically expire 24 hours after they
+ Read sessions automatically expire 6 hours after they
are created and do not require manual clean-up by the
caller.

@@ -256,7 +256,7 @@ def create_read_session(
number of pre-filtered rows, so some filters can lead to
lopsided assignments.

- Read sessions automatically expire 24 hours after they
+ Read sessions automatically expire 6 hours after they
are created and do not require manual clean-up by the
caller.

22 changes: 22 additions & 0 deletions google/cloud/bigquery_storage_v1/services/big_query_write/__init__.py
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import BigQueryWriteClient
from .async_client import BigQueryWriteAsyncClient

__all__ = (
"BigQueryWriteClient",
"BigQueryWriteAsyncClient",
)
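
Both import paths resolve to the same generated class, so either spelling works; a quick illustrative check:

```python
# The top-level export and the services-package export are the same object.
from google.cloud import bigquery_storage
from google.cloud.bigquery_storage_v1.services.big_query_write import (
    BigQueryWriteClient,
)

assert bigquery_storage.BigQueryWriteClient is BigQueryWriteClient
```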