- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
{% block body %} {% endblock %} diff --git a/google/cloud/datastore_admin_v1/gapic/__init__.py b/google/cloud/datastore_admin_v1/gapic/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py b/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py new file mode 100644 index 00000000..9495419e --- /dev/null +++ b/google/cloud/datastore_admin_v1/gapic/datastore_admin_client.py @@ -0,0 +1,665 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Accesses the google.datastore.admin.v1 DatastoreAdmin API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.operation +import google.api_core.operations_v1 +import google.api_core.page_iterator +import grpc + +from google.cloud.datastore_admin_v1.gapic import datastore_admin_client_config +from google.cloud.datastore_admin_v1.gapic import enums +from google.cloud.datastore_admin_v1.gapic.transports import ( + datastore_admin_grpc_transport, +) +from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2 +from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2_grpc +from google.cloud.datastore_admin_v1.proto import index_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-datastore-admin", +).version + + +class DatastoreAdminClient(object): + """ + Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. + + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). 
+ + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. + + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. + + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ + + SERVICE_ADDRESS = "datastore.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. 
This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.datastore.admin.v1.DatastoreAdmin" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatastoreAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.DatastoreAdminGrpcTransport, + Callable[[~.Credentials, type], ~.DatastoreAdminGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. 
+ client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = datastore_admin_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=datastore_admin_grpc_transport.DatastoreAdminGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." 
+ ) + self.transport = transport + else: + self.transport = datastore_admin_grpc_transport.DatastoreAdminGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials, + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION, + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME], + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def export_entities( + self, + project_id, + output_url_prefix, + labels=None, + entity_filter=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Exports a copy of all or a subset of entities from Google Cloud Datastore + to another storage system, such as Google Cloud Storage. Recent updates to + entities may not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed via the + Operation resource that is created. The output of an export may only be + used once the associated operation is done. If an export operation is + cancelled before completion it may leave partial data behind in Google + Cloud Storage. 
+ + Example: + >>> from google.cloud import datastore_admin_v1 + >>> + >>> client = datastore_admin_v1.DatastoreAdminClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `output_url_prefix`: + >>> output_url_prefix = '' + >>> + >>> response = client.export_entities(project_id, output_url_prefix) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + project_id (str): Required. Project ID against which to make the request. + output_url_prefix (str): Required. Location for the export metadata and data files. + + The full resource URL of the external storage location. Currently, only + Google Cloud Storage is supported. So output_url_prefix should be of the + form: ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` is + the name of the Cloud Storage bucket and ``NAMESPACE_PATH`` is an + optional Cloud Storage namespace path (this is not a Cloud Datastore + namespace). For more information about Cloud Storage namespace paths, + see `Object name + considerations `__. + + The resulting files will be nested deeper than the specified URL prefix. + The final output URL will be provided in the + ``google.datastore.admin.v1.ExportEntitiesResponse.output_url`` field. + That value should be used for subsequent ImportEntities operations. + + By nesting the data files deeper, the same Cloud Storage bucket can be + used in multiple ExportEntities operations without conflict. + labels (dict[str -> str]): Client-assigned labels. + entity_filter (Union[dict, ~google.cloud.datastore_admin_v1.types.EntityFilter]): Description of what data from the project is included in the export. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_admin_v1.types.EntityFilter` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datastore_admin_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "export_entities" not in self._inner_api_calls: + self._inner_api_calls[ + "export_entities" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.export_entities, + default_retry=self._method_configs["ExportEntities"].retry, + default_timeout=self._method_configs["ExportEntities"].timeout, + client_info=self._client_info, + ) + + request = datastore_admin_pb2.ExportEntitiesRequest( + project_id=project_id, + output_url_prefix=output_url_prefix, + labels=labels, + entity_filter=entity_filter, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["export_entities"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + datastore_admin_pb2.ExportEntitiesResponse, + metadata_type=datastore_admin_pb2.ExportEntitiesMetadata, + ) + + def import_entities( + self, + project_id, + input_url, + labels=None, + entity_filter=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Imports entities into Google Cloud Datastore. Existing entities with the + same key are overwritten. The import occurs in the background and its + progress can be monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it is possible + that a subset of the data has already been imported to Cloud Datastore. 
+ + Example: + >>> from google.cloud import datastore_admin_v1 + >>> + >>> client = datastore_admin_v1.DatastoreAdminClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `input_url`: + >>> input_url = '' + >>> + >>> response = client.import_entities(project_id, input_url) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + project_id (str): Required. Project ID against which to make the request. + input_url (str): Required. The full resource URL of the external storage location. + Currently, only Google Cloud Storage is supported. So input_url should + be of the form: + ``gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, + where ``BUCKET_NAME`` is the name of the Cloud Storage bucket, + ``NAMESPACE_PATH`` is an optional Cloud Storage namespace path (this is + not a Cloud Datastore namespace), and ``OVERALL_EXPORT_METADATA_FILE`` + is the metadata file written by the ExportEntities operation. For more + information about Cloud Storage namespace paths, see `Object name + considerations `__. + + For more information, see + ``google.datastore.admin.v1.ExportEntitiesResponse.output_url``. + labels (dict[str -> str]): Client-assigned labels. + entity_filter (Union[dict, ~google.cloud.datastore_admin_v1.types.EntityFilter]): Optionally specify which kinds/namespaces are to be imported. If + provided, the list must be a subset of the EntityFilter used in creating + the export, otherwise a FAILED_PRECONDITION error will be returned. If + no filter is specified then all entities from the export are imported. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_admin_v1.types.EntityFilter` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datastore_admin_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "import_entities" not in self._inner_api_calls: + self._inner_api_calls[ + "import_entities" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.import_entities, + default_retry=self._method_configs["ImportEntities"].retry, + default_timeout=self._method_configs["ImportEntities"].timeout, + client_info=self._client_info, + ) + + request = datastore_admin_pb2.ImportEntitiesRequest( + project_id=project_id, + input_url=input_url, + labels=labels, + entity_filter=entity_filter, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["import_entities"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=datastore_admin_pb2.ImportEntitiesMetadata, + ) + + def get_index( + self, + project_id=None, + index_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets an index. + + Example: + >>> from google.cloud import datastore_admin_v1 + >>> + >>> client = datastore_admin_v1.DatastoreAdminClient() + >>> + >>> response = client.get_index() + + Args: + project_id (str): Project ID against which to make the request. + index_id (str): The resource ID of the index to get. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datastore_admin_v1.types.Index` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_index" not in self._inner_api_calls: + self._inner_api_calls[ + "get_index" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_index, + default_retry=self._method_configs["GetIndex"].retry, + default_timeout=self._method_configs["GetIndex"].timeout, + client_info=self._client_info, + ) + + request = datastore_admin_pb2.GetIndexRequest( + project_id=project_id, index_id=index_id, + ) + return self._inner_api_calls["get_index"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_indexes( + self, + project_id=None, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists the indexes that match the specified filters. Datastore uses an + eventually consistent query to fetch the list of indexes and may + occasionally return stale results. + + Example: + >>> from google.cloud import datastore_admin_v1 + >>> + >>> client = datastore_admin_v1.DatastoreAdminClient() + >>> + >>> # Iterate over all results + >>> for element in client.list_indexes(): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_indexes().pages: + ... for element in page: + ... # process element + ... pass + + Args: + project_id (str): Project ID against which to make the request. 
+ filter_ (str) + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.datastore_admin_v1.types.Index` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_indexes" not in self._inner_api_calls: + self._inner_api_calls[ + "list_indexes" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_indexes, + default_retry=self._method_configs["ListIndexes"].retry, + default_timeout=self._method_configs["ListIndexes"].timeout, + client_info=self._client_info, + ) + + request = datastore_admin_pb2.ListIndexesRequest( + project_id=project_id, filter=filter_, page_size=page_size, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_indexes"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="indexes", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator diff --git a/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py b/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py new file mode 100644 index 00000000..dbbe2b85 --- /dev/null +++ b/google/cloud/datastore_admin_v1/gapic/datastore_admin_client_config.py @@ -0,0 +1,43 @@ +config = { + "interfaces": { + "google.datastore.admin.v1.DatastoreAdmin": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "ExportEntities": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + 
"retry_params_name": "default", + }, + "ImportEntities": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetIndex": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListIndexes": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/google/cloud/datastore_admin_v1/gapic/enums.py b/google/cloud/datastore_admin_v1/gapic/enums.py new file mode 100644 index 00000000..77c303fc --- /dev/null +++ b/google/cloud/datastore_admin_v1/gapic/enums.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class OperationType(enum.IntEnum): + """ + Operation types. + + Attributes: + OPERATION_TYPE_UNSPECIFIED (int): Unspecified. + EXPORT_ENTITIES (int): ExportEntities. + IMPORT_ENTITIES (int): ImportEntities. + CREATE_INDEX (int): CreateIndex. + DELETE_INDEX (int): DeleteIndex. + """ + + OPERATION_TYPE_UNSPECIFIED = 0 + EXPORT_ENTITIES = 1 + IMPORT_ENTITIES = 2 + CREATE_INDEX = 3 + DELETE_INDEX = 4 + + +class CommonMetadata(object): + class State(enum.IntEnum): + """ + The various possible states for an ongoing Operation. + + Attributes: + STATE_UNSPECIFIED (int): Unspecified. + INITIALIZING (int): Request is being prepared for processing. 
+ PROCESSING (int): Request is actively being processed. + CANCELLING (int): Request is in the process of being cancelled after user called + google.longrunning.Operations.CancelOperation on the operation. + FINALIZING (int): Request has been processed and is in its finalization stage. + SUCCESSFUL (int): Request has completed successfully. + FAILED (int): Request has finished being processed, but encountered an error. + CANCELLED (int): Request has finished being cancelled after user called + google.longrunning.Operations.CancelOperation. + """ + + STATE_UNSPECIFIED = 0 + INITIALIZING = 1 + PROCESSING = 2 + CANCELLING = 3 + FINALIZING = 4 + SUCCESSFUL = 5 + FAILED = 6 + CANCELLED = 7 + + +class Index(object): + class AncestorMode(enum.IntEnum): + """ + For an ordered index, specifies whether each of the entity's ancestors + will be included. + + Attributes: + ANCESTOR_MODE_UNSPECIFIED (int): The ancestor mode is unspecified. + NONE (int): Do not include the entity's ancestors in the index. + ALL_ANCESTORS (int): Include all the entity's ancestors in the index. + """ + + ANCESTOR_MODE_UNSPECIFIED = 0 + NONE = 1 + ALL_ANCESTORS = 2 + + class Direction(enum.IntEnum): + """ + The direction determines how a property is indexed. + + Attributes: + DIRECTION_UNSPECIFIED (int): The direction is unspecified. + ASCENDING (int): The property's values are indexed so as to support sequencing in + ascending order and also query by <, >, <=, >=, and =. + DESCENDING (int): The property's values are indexed so as to support sequencing in + descending order and also query by <, >, <=, >=, and =. + """ + + DIRECTION_UNSPECIFIED = 0 + ASCENDING = 1 + DESCENDING = 2 + + class State(enum.IntEnum): + """ + The possible set of states of an index. + + Attributes: + STATE_UNSPECIFIED (int): The state is unspecified. + CREATING (int): The index is being created, and cannot be used by queries. + There is an active long-running operation for the index. 
+            The index is updated when writing an entity. +            Some index data may exist. +          READY (int): The index is ready to be used. +            The index is updated when writing an entity. +            The index is fully populated from all stored entities it applies to. +          DELETING (int): The index is being deleted, and cannot be used by queries. +            There is an active long-running operation for the index. +            The index is not updated when writing an entity. +            Some index data may exist. +          ERROR (int): The index was being created or deleted, but something went wrong. +            The index cannot be used by queries. +            There is no active long-running operation for the index, +            and the most recently finished long-running operation failed. +            The index is not updated when writing an entity. +            Some index data may exist. +        """ + +        STATE_UNSPECIFIED = 0 +        CREATING = 1 +        READY = 2 +        DELETING = 3 +        ERROR = 4 diff --git a/google/cloud/datastore_admin_v1/gapic/transports/__init__.py b/google/cloud/datastore_admin_v1/gapic/transports/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py b/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py new file mode 100644 index 00000000..11fd92af --- /dev/null +++ b/google/cloud/datastore_admin_v1/gapic/transports/datastore_admin_grpc_transport.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers +import google.api_core.operations_v1 + +from google.cloud.datastore_admin_v1.proto import datastore_admin_pb2_grpc + + +class DatastoreAdminGrpcTransport(object): + """gRPC transport class providing stubs for + google.datastore.admin.v1 DatastoreAdmin API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ) + + def __init__( + self, channel=None, credentials=None, address="datastore.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive.", + ) + + # Create the channel. 
+ if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + "datastore_admin_stub": datastore_admin_pb2_grpc.DatastoreAdminStub( + channel + ), + } + + # Because this API includes a method that returns a + # long-running operation (proto: google.longrunning.Operation), + # instantiate an LRO client. + self._operations_client = google.api_core.operations_v1.OperationsClient( + channel + ) + + @classmethod + def create_channel( + cls, address="datastore.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def export_entities(self): + """Return the gRPC stub for :meth:`DatastoreAdminClient.export_entities`. + + Exports a copy of all or a subset of entities from Google Cloud Datastore + to another storage system, such as Google Cloud Storage. Recent updates to + entities may not be reflected in the export. 
The export occurs in the + background and its progress can be monitored and managed via the + Operation resource that is created. The output of an export may only be + used once the associated operation is done. If an export operation is + cancelled before completion it may leave partial data behind in Google + Cloud Storage. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_admin_stub"].ExportEntities + + @property + def import_entities(self): + """Return the gRPC stub for :meth:`DatastoreAdminClient.import_entities`. + + Imports entities into Google Cloud Datastore. Existing entities with the + same key are overwritten. The import occurs in the background and its + progress can be monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it is possible + that a subset of the data has already been imported to Cloud Datastore. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_admin_stub"].ImportEntities + + @property + def get_index(self): + """Return the gRPC stub for :meth:`DatastoreAdminClient.get_index`. + + Gets an index. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_admin_stub"].GetIndex + + @property + def list_indexes(self): + """Return the gRPC stub for :meth:`DatastoreAdminClient.list_indexes`. + + Lists the indexes that match the specified filters. Datastore uses an + eventually consistent query to fetch the list of indexes and may + occasionally return stale results. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["datastore_admin_stub"].ListIndexes diff --git a/google/cloud/datastore_admin_v1/proto/__init__.py b/google/cloud/datastore_admin_v1/proto/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/google/cloud/datastore_admin_v1/proto/datastore_admin.proto b/google/cloud/datastore_admin_v1/proto/datastore_admin.proto new file mode 100644 index 00000000..c0f47076 --- /dev/null +++ b/google/cloud/datastore_admin_v1/proto/datastore_admin.proto @@ -0,0 +1,425 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.datastore.admin.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/datastore/admin/v1/index.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.Datastore.Admin.V1"; +option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1;admin"; +option java_multiple_files = true; +option java_outer_classname = "DatastoreAdminProto"; +option java_package = "com.google.datastore.admin.v1"; +option ruby_package = "Google::Cloud::Datastore::Admin::V1"; + +// Google Cloud Datastore Admin API +// +// +// The Datastore Admin API provides several admin services for Cloud Datastore. 
+// +// ----------------------------------------------------------------------------- +// ## Concepts +// +// Project, namespace, kind, and entity as defined in the Google Cloud Datastore +// API. +// +// Operation: An Operation represents work being performed in the background. +// +// EntityFilter: Allows specifying a subset of entities in a project. This is +// specified as a combination of kinds and namespaces (either or both of which +// may be all). +// +// ----------------------------------------------------------------------------- +// ## Services +// +// # Export/Import +// +// The Export/Import service provides the ability to copy all or a subset of +// entities to/from Google Cloud Storage. +// +// Exported data may be imported into Cloud Datastore for any Google Cloud +// Platform project. It is not restricted to the export source project. It is +// possible to export from one project and then import into another. +// +// Exported data can also be loaded into Google BigQuery for analysis. +// +// Exports and imports are performed asynchronously. An Operation resource is +// created for each export/import. The state (including any errors encountered) +// of the export/import may be queried via the Operation resource. +// +// # Index +// +// The index service manages Cloud Datastore composite indexes. +// +// Index creation and deletion are performed asynchronously. +// An Operation resource is created for each such asynchronous operation. +// The state of the operation (including any errors encountered) +// may be queried via the Operation resource. +// +// # Operation +// +// The Operations collection provides a record of actions performed for the +// specified project (including any operations in progress). Operations are not +// created directly but through calls on other collections or resources. +// +// An operation that is not yet done may be cancelled. 
The request to cancel is +// asynchronous and the operation may continue to run for some time after the +// request to cancel is made. +// +// An operation that is done may be deleted so that it is no longer listed as +// part of the Operation collection. +// +// ListOperations returns all pending operations, but not completed operations. +// +// Operations are created by service DatastoreAdmin, +// but are accessed via service google.longrunning.Operations. +service DatastoreAdmin { + option (google.api.default_host) = "datastore.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/datastore"; + + // Exports a copy of all or a subset of entities from Google Cloud Datastore + // to another storage system, such as Google Cloud Storage. Recent updates to + // entities may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + rpc ExportEntities(ExportEntitiesRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:export" + body: "*" + }; + option (google.api.method_signature) = "project_id,labels,entity_filter,output_url_prefix"; + option (google.longrunning.operation_info) = { + response_type: "ExportEntitiesResponse" + metadata_type: "ExportEntitiesMetadata" + }; + } + + // Imports entities into Google Cloud Datastore. Existing entities with the + // same key are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. 
If an ImportEntities operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Datastore. + rpc ImportEntities(ImportEntitiesRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}:import" + body: "*" + }; + option (google.api.method_signature) = "project_id,labels,input_url,entity_filter"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "ImportEntitiesMetadata" + }; + } + + // Gets an index. + rpc GetIndex(GetIndexRequest) returns (Index) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/indexes/{index_id}" + }; + } + + // Lists the indexes that match the specified filters. Datastore uses an + // eventually consistent query to fetch the list of indexes and may + // occasionally return stale results. + rpc ListIndexes(ListIndexesRequest) returns (ListIndexesResponse) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/indexes" + }; + } +} + +// Metadata common to all Datastore Admin operations. +message CommonMetadata { + // The various possible states for an ongoing Operation. + enum State { + // Unspecified. + STATE_UNSPECIFIED = 0; + + // Request is being prepared for processing. + INITIALIZING = 1; + + // Request is actively being processed. + PROCESSING = 2; + + // Request is in the process of being cancelled after user called + // google.longrunning.Operations.CancelOperation on the operation. + CANCELLING = 3; + + // Request has been processed and is in its finalization stage. + FINALIZING = 4; + + // Request has completed successfully. + SUCCESSFUL = 5; + + // Request has finished being processed, but encountered an error. + FAILED = 6; + + // Request has finished being cancelled after user called + // google.longrunning.Operations.CancelOperation. + CANCELLED = 7; + } + + // The time that work began on the operation. 
+  google.protobuf.Timestamp start_time = 1;
+
+  // The time the operation ended, either successfully or otherwise.
+  google.protobuf.Timestamp end_time = 2;
+
+  // The type of the operation. Can be used as a filter in
+  // ListOperationsRequest.
+  OperationType operation_type = 3;
+
+  // The client-assigned labels which were provided when the operation was
+  // created. May also include additional labels.
+  map<string, string> labels = 4;
+
+  // The current state of the Operation.
+  State state = 5;
+}
+
+// Operation types.
+enum OperationType {
+  // Unspecified.
+  OPERATION_TYPE_UNSPECIFIED = 0;
+
+  // ExportEntities.
+  EXPORT_ENTITIES = 1;
+
+  // ImportEntities.
+  IMPORT_ENTITIES = 2;
+
+  // CreateIndex.
+  CREATE_INDEX = 3;
+
+  // DeleteIndex.
+  DELETE_INDEX = 4;
+}
+
+// Measures the progress of a particular metric.
+message Progress {
+  // The amount of work that has been completed. Note that this may be greater
+  // than work_estimated.
+  int64 work_completed = 1;
+
+  // An estimate of how much work needs to be performed. May be zero if the
+  // work estimate is unavailable.
+  int64 work_estimated = 2;
+}
+
+// The request for
+// [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities].
+message ExportEntitiesRequest {
+  // Required. Project ID against which to make the request.
+  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // Client-assigned labels.
+  map<string, string> labels = 2;
+
+  // Description of what data from the project is included in the export.
+  EntityFilter entity_filter = 3;
+
+  // Required. Location for the export metadata and data files.
+  //
+  // The full resource URL of the external storage location. Currently, only
+  // Google Cloud Storage is supported. So output_url_prefix should be of the
+  // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the
+  // name of the Cloud Storage bucket and `NAMESPACE_PATH` is an optional Cloud
+  // Storage namespace path (this is not a Cloud Datastore namespace). For more
+  // information about Cloud Storage namespace paths, see
+  // [Object name
+  // considerations](https://cloud.google.com/storage/docs/naming#object-considerations).
+  //
+  // The resulting files will be nested deeper than the specified URL prefix.
+  // The final output URL will be provided in the
+  // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] field. That
+  // value should be used for subsequent ImportEntities operations.
+  //
+  // By nesting the data files deeper, the same Cloud Storage bucket can be used
+  // in multiple ExportEntities operations without conflict.
+  string output_url_prefix = 4 [(google.api.field_behavior) = REQUIRED];
+}
+
+// The request for
+// [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities].
+message ImportEntitiesRequest {
+  // Required. Project ID against which to make the request.
+  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // Client-assigned labels.
+  map<string, string> labels = 2;
+
+  // Required. The full resource URL of the external storage location. Currently, only
+  // Google Cloud Storage is supported. So input_url should be of the form:
+  // `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where
+  // `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is
+  // an optional Cloud Storage namespace path (this is not a Cloud Datastore
+  // namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata file written
+  // by the ExportEntities operation. For more information about Cloud Storage
+  // namespace paths, see
+  // [Object name
+  // considerations](https://cloud.google.com/storage/docs/naming#object-considerations).
+  //
+  // For more information, see
+  // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url].
+  string input_url = 3 [(google.api.field_behavior) = REQUIRED];
+
+  // Optionally specify which kinds/namespaces are to be imported. If provided,
+  // the list must be a subset of the EntityFilter used in creating the export,
+  // otherwise a FAILED_PRECONDITION error will be returned. If no filter is
+  // specified then all entities from the export are imported.
+  EntityFilter entity_filter = 4;
+}
+
+// The response for
+// [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities].
+message ExportEntitiesResponse {
+  // Location of the output metadata file. This can be used to begin an import
+  // into Cloud Datastore (this project or another project). See
+  // [google.datastore.admin.v1.ImportEntitiesRequest.input_url][google.datastore.admin.v1.ImportEntitiesRequest.input_url].
+  // Only present if the operation completed successfully.
+  string output_url = 1;
+}
+
+// Metadata for ExportEntities operations.
+message ExportEntitiesMetadata {
+  // Metadata common to all Datastore Admin operations.
+  CommonMetadata common = 1;
+
+  // An estimate of the number of entities processed.
+  Progress progress_entities = 2;
+
+  // An estimate of the number of bytes processed.
+  Progress progress_bytes = 3;
+
+  // Description of which entities are being exported.
+  EntityFilter entity_filter = 4;
+
+  // Location for the export metadata and data files. This will be the same
+  // value as the
+  // [google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix]
+  // field. The final output location is provided in
+  // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url].
+  string output_url_prefix = 5;
+}
+
+// Metadata for ImportEntities operations.
+message ImportEntitiesMetadata {
+  // Metadata common to all Datastore Admin operations.
+  CommonMetadata common = 1;
+
+  // An estimate of the number of entities processed.
+  Progress progress_entities = 2;
+
+  // An estimate of the number of bytes processed.
+  Progress progress_bytes = 3;
+
+  // Description of which entities are being imported.
+  EntityFilter entity_filter = 4;
+
+  // The location of the import metadata file. This will be the same value as
+  // the [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] field.
+  string input_url = 5;
+}
+
+// Identifies a subset of entities in a project. This is specified as
+// combinations of kinds and namespaces (either or both of which may be all, as
+// described in the following examples).
+// Example usage:
+//
+// Entire project:
+//   kinds=[], namespace_ids=[]
+//
+// Kinds Foo and Bar in all namespaces:
+//   kinds=['Foo', 'Bar'], namespace_ids=[]
+//
+// Kinds Foo and Bar only in the default namespace:
+//   kinds=['Foo', 'Bar'], namespace_ids=['']
+//
+// Kinds Foo and Bar in both the default and Baz namespaces:
+//   kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz']
+//
+// The entire Baz namespace:
+//   kinds=[], namespace_ids=['Baz']
+message EntityFilter {
+  // If empty, then this represents all kinds.
+  repeated string kinds = 1;
+
+  // An empty list represents all namespaces. This is the preferred
+  // usage for projects that don't use namespaces.
+  //
+  // An empty string element represents the default namespace. This should be
+  // used if the project has data in non-default namespaces, but doesn't want to
+  // include them.
+  // Each namespace in this list must be unique.
+ repeated string namespace_ids = 2; +} + +// The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. +message GetIndexRequest { + // Project ID against which to make the request. + string project_id = 1; + + // The resource ID of the index to get. + string index_id = 3; +} + +// The request for +// [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. +message ListIndexesRequest { + // Project ID against which to make the request. + string project_id = 1; + + string filter = 3; + + // The maximum number of items to return. If zero, then all results will be + // returned. + int32 page_size = 4; + + // The next_page_token value returned from a previous List request, if any. + string page_token = 5; +} + +// The response for +// [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. +message ListIndexesResponse { + // The indexes. + repeated Index indexes = 1; + + // The standard List next-page token. + string next_page_token = 2; +} + +// Metadata for Index operations. +message IndexOperationMetadata { + // Metadata common to all Datastore Admin operations. + CommonMetadata common = 1; + + // An estimate of the number of entities processed. + Progress progress_entities = 2; + + // The index resource ID that this operation is acting on. + string index_id = 3; +} diff --git a/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py b/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py new file mode 100644 index 00000000..f16463bb --- /dev/null +++ b/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2.py @@ -0,0 +1,1847 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/datastore_admin_v1/proto/datastore_admin.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.cloud.datastore_admin_v1.proto import ( + index_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/datastore_admin_v1/proto/datastore_admin.proto", + package="google.datastore.admin.v1", + syntax="proto3", + serialized_options=b"\n\035com.google.datastore.admin.v1B\023DatastoreAdminProtoP\001Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\252\002\037Google.Cloud.Datastore.Admin.V1\352\002#Google::Cloud::Datastore::Admin::V1", + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n;google/cloud/datastore_admin_v1/proto/datastore_admin.proto\x12\x19google.datastore.admin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x31google/cloud/datastore_admin_v1/proto/index.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf4\x03\n\x0e\x43ommonMetadata\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\x0eoperation_type\x18\x03 \x01(\x0e\x32(.google.datastore.admin.v1.OperationType\x12\x45\n\x06labels\x18\x04 \x03(\x0b\x32\x35.google.datastore.admin.v1.CommonMetadata.LabelsEntry\x12>\n\x05state\x18\x05 \x01(\x0e\x32/.google.datastore.admin.v1.CommonMetadata.State\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x8b\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINITIALIZING\x10\x01\x12\x0e\n\nPROCESSING\x10\x02\x12\x0e\n\nCANCELLING\x10\x03\x12\x0e\n\nFINALIZING\x10\x04\x12\x0e\n\nSUCCESSFUL\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\x12\r\n\tCANCELLED\x10\x07":\n\x08Progress\x12\x16\n\x0ework_completed\x18\x01 \x01(\x03\x12\x16\n\x0ework_estimated\x18\x02 \x01(\x03"\x8d\x02\n\x15\x45xportEntitiesRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x06labels\x18\x02 \x03(\x0b\x32<.google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry\x12>\n\rentity_filter\x18\x03 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x1e\n\x11output_url_prefix\x18\x04 \x01(\tB\x03\xe0\x41\x02\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x85\x02\n\x15ImportEntitiesRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x06labels\x18\x02 \x03(\x0b\x32<.google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry\x12\x16\n\tinput_url\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12>\n\rentity_filter\x18\x04 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01",\n\x16\x45xportEntitiesResponse\x12\x12\n\noutput_url\x18\x01 \x01(\t"\xab\x02\n\x16\x45xportEntitiesMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x03 
\x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12>\n\rentity_filter\x18\x04 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x19\n\x11output_url_prefix\x18\x05 \x01(\t"\xa3\x02\n\x16ImportEntitiesMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12;\n\x0eprogress_bytes\x18\x03 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12>\n\rentity_filter\x18\x04 \x01(\x0b\x32\'.google.datastore.admin.v1.EntityFilter\x12\x11\n\tinput_url\x18\x05 \x01(\t"4\n\x0c\x45ntityFilter\x12\r\n\x05kinds\x18\x01 \x03(\t\x12\x15\n\rnamespace_ids\x18\x02 \x03(\t"7\n\x0fGetIndexRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x10\n\x08index_id\x18\x03 \x01(\t"_\n\x12ListIndexesRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"a\n\x13ListIndexesResponse\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.datastore.admin.v1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xa5\x01\n\x16IndexOperationMetadata\x12\x39\n\x06\x63ommon\x18\x01 \x01(\x0b\x32).google.datastore.admin.v1.CommonMetadata\x12>\n\x11progress_entities\x18\x02 \x01(\x0b\x32#.google.datastore.admin.v1.Progress\x12\x10\n\x08index_id\x18\x03 \x01(\t*}\n\rOperationType\x12\x1e\n\x1aOPERATION_TYPE_UNSPECIFIED\x10\x00\x12\x13\n\x0f\x45XPORT_ENTITIES\x10\x01\x12\x13\n\x0fIMPORT_ENTITIES\x10\x02\x12\x10\n\x0c\x43REATE_INDEX\x10\x03\x12\x10\n\x0c\x44\x45LETE_INDEX\x10\x04\x32\x9c\x07\n\x0e\x44\x61tastoreAdmin\x12\xf6\x01\n\x0e\x45xportEntities\x12\x30.google.datastore.admin.v1.ExportEntitiesRequest\x1a\x1d.google.longrunning.Operation"\x92\x01\x82\xd3\xe4\x93\x02%" 
/v1/projects/{project_id}:export:\x01*\xda\x41\x31project_id,labels,entity_filter,output_url_prefix\xca\x41\x30\n\x16\x45xportEntitiesResponse\x12\x16\x45xportEntitiesMetadata\x12\xed\x01\n\x0eImportEntities\x12\x30.google.datastore.admin.v1.ImportEntitiesRequest\x1a\x1d.google.longrunning.Operation"\x89\x01\x82\xd3\xe4\x93\x02%" /v1/projects/{project_id}:import:\x01*\xda\x41)project_id,labels,input_url,entity_filter\xca\x41/\n\x15google.protobuf.Empty\x12\x16ImportEntitiesMetadata\x12\x8e\x01\n\x08GetIndex\x12*.google.datastore.admin.v1.GetIndexRequest\x1a .google.datastore.admin.v1.Index"4\x82\xd3\xe4\x93\x02.\x12,/v1/projects/{project_id}/indexes/{index_id}\x12\x97\x01\n\x0bListIndexes\x12-.google.datastore.admin.v1.ListIndexesRequest\x1a..google.datastore.admin.v1.ListIndexesResponse")\x82\xd3\xe4\x93\x02#\x12!/v1/projects/{project_id}/indexes\x1av\xca\x41\x18\x64\x61tastore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xbe\x01\n\x1d\x63om.google.datastore.admin.v1B\x13\x44\x61tastoreAdminProtoP\x01Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\xaa\x02\x1fGoogle.Cloud.Datastore.Admin.V1\xea\x02#Google::Cloud::Datastore::Admin::V1b\x06proto3', + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) + +_OPERATIONTYPE = _descriptor.EnumDescriptor( + name="OperationType", + full_name="google.datastore.admin.v1.OperationType", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="OPERATION_TYPE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + 
create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="EXPORT_ENTITIES", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="IMPORT_ENTITIES", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="CREATE_INDEX", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="DELETE_INDEX", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=2515, + serialized_end=2640, +) +_sym_db.RegisterEnumDescriptor(_OPERATIONTYPE) + +OperationType = enum_type_wrapper.EnumTypeWrapper(_OPERATIONTYPE) +OPERATION_TYPE_UNSPECIFIED = 0 +EXPORT_ENTITIES = 1 +IMPORT_ENTITIES = 2 +CREATE_INDEX = 3 +DELETE_INDEX = 4 + + +_COMMONMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.datastore.admin.v1.CommonMetadata.State", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="INITIALIZING", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="PROCESSING", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="CANCELLING", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, 
+ ), + _descriptor.EnumValueDescriptor( + name="FINALIZING", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="SUCCESSFUL", + index=5, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="FAILED", + index=6, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", + index=7, + number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=661, + serialized_end=800, +) +_sym_db.RegisterEnumDescriptor(_COMMONMETADATA_STATE) + + +_COMMONMETADATA_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.datastore.admin.v1.CommonMetadata.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=613, + serialized_end=658, +) + +_COMMONMETADATA = _descriptor.Descriptor( + name="CommonMetadata", + full_name="google.datastore.admin.v1.CommonMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="start_time", + full_name="google.datastore.admin.v1.CommonMetadata.start_time", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.datastore.admin.v1.CommonMetadata.end_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="operation_type", + full_name="google.datastore.admin.v1.CommonMetadata.operation_type", + index=2, + number=3, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.datastore.admin.v1.CommonMetadata.labels", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + 
has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.datastore.admin.v1.CommonMetadata.state", + index=4, + number=5, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[_COMMONMETADATA_LABELSENTRY,], + enum_types=[_COMMONMETADATA_STATE,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=300, + serialized_end=800, +) + + +_PROGRESS = _descriptor.Descriptor( + name="Progress", + full_name="google.datastore.admin.v1.Progress", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="work_completed", + full_name="google.datastore.admin.v1.Progress.work_completed", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="work_estimated", + full_name="google.datastore.admin.v1.Progress.work_estimated", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=802, + serialized_end=860, +) + + +_EXPORTENTITIESREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=613, + serialized_end=658, +) + +_EXPORTENTITIESREQUEST = _descriptor.Descriptor( + name="ExportEntitiesRequest", + full_name="google.datastore.admin.v1.ExportEntitiesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + 
full_name="google.datastore.admin.v1.ExportEntitiesRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.labels", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="entity_filter", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.entity_filter", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="output_url_prefix", + full_name="google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[_EXPORTENTITIESREQUEST_LABELSENTRY,], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=863, + 
serialized_end=1132, +) + + +_IMPORTENTITIESREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=613, + serialized_end=658, +) + +_IMPORTENTITIESREQUEST = _descriptor.Descriptor( + name="ImportEntitiesRequest", + full_name="google.datastore.admin.v1.ImportEntitiesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.labels", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="input_url", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.input_url", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="entity_filter", + full_name="google.datastore.admin.v1.ImportEntitiesRequest.entity_filter", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[_IMPORTENTITIESREQUEST_LABELSENTRY,], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1135, + serialized_end=1396, +) + + +_EXPORTENTITIESRESPONSE = _descriptor.Descriptor( + name="ExportEntitiesResponse", + full_name="google.datastore.admin.v1.ExportEntitiesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + 
create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="output_url", + full_name="google.datastore.admin.v1.ExportEntitiesResponse.output_url", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1398, + serialized_end=1442, +) + + +_EXPORTENTITIESMETADATA = _descriptor.Descriptor( + name="ExportEntitiesMetadata", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="common", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata.common", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_entities", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata.progress_entities", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_bytes", + 
full_name="google.datastore.admin.v1.ExportEntitiesMetadata.progress_bytes", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="entity_filter", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata.entity_filter", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="output_url_prefix", + full_name="google.datastore.admin.v1.ExportEntitiesMetadata.output_url_prefix", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1445, + serialized_end=1744, +) + + +_IMPORTENTITIESMETADATA = _descriptor.Descriptor( + name="ImportEntitiesMetadata", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="common", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.common", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_entities", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.progress_entities", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_bytes", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.progress_bytes", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="entity_filter", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.entity_filter", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="input_url", + full_name="google.datastore.admin.v1.ImportEntitiesMetadata.input_url", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1747, + serialized_end=2038, +) + + +_ENTITYFILTER = _descriptor.Descriptor( + name="EntityFilter", + full_name="google.datastore.admin.v1.EntityFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="kinds", + full_name="google.datastore.admin.v1.EntityFilter.kinds", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="namespace_ids", + full_name="google.datastore.admin.v1.EntityFilter.namespace_ids", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2040, + serialized_end=2092, +) + + +_GETINDEXREQUEST = _descriptor.Descriptor( + name="GetIndexRequest", + full_name="google.datastore.admin.v1.GetIndexRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.datastore.admin.v1.GetIndexRequest.project_id", + index=0, + number=1, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="index_id", + full_name="google.datastore.admin.v1.GetIndexRequest.index_id", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2094, + serialized_end=2149, +) + + +_LISTINDEXESREQUEST = _descriptor.Descriptor( + name="ListIndexesRequest", + full_name="google.datastore.admin.v1.ListIndexesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.datastore.admin.v1.ListIndexesRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.datastore.admin.v1.ListIndexesRequest.filter", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.datastore.admin.v1.ListIndexesRequest.page_size", + index=2, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.datastore.admin.v1.ListIndexesRequest.page_token", + index=3, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2151, + serialized_end=2246, +) + + +_LISTINDEXESRESPONSE = _descriptor.Descriptor( + name="ListIndexesResponse", + full_name="google.datastore.admin.v1.ListIndexesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="indexes", + full_name="google.datastore.admin.v1.ListIndexesResponse.indexes", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + 
full_name="google.datastore.admin.v1.ListIndexesResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2248, + serialized_end=2345, +) + + +_INDEXOPERATIONMETADATA = _descriptor.Descriptor( + name="IndexOperationMetadata", + full_name="google.datastore.admin.v1.IndexOperationMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="common", + full_name="google.datastore.admin.v1.IndexOperationMetadata.common", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="progress_entities", + full_name="google.datastore.admin.v1.IndexOperationMetadata.progress_entities", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="index_id", + full_name="google.datastore.admin.v1.IndexOperationMetadata.index_id", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2348, + serialized_end=2513, +) + +_COMMONMETADATA_LABELSENTRY.containing_type = _COMMONMETADATA +_COMMONMETADATA.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_COMMONMETADATA.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_COMMONMETADATA.fields_by_name["operation_type"].enum_type = _OPERATIONTYPE +_COMMONMETADATA.fields_by_name["labels"].message_type = _COMMONMETADATA_LABELSENTRY +_COMMONMETADATA.fields_by_name["state"].enum_type = _COMMONMETADATA_STATE +_COMMONMETADATA_STATE.containing_type = _COMMONMETADATA +_EXPORTENTITIESREQUEST_LABELSENTRY.containing_type = _EXPORTENTITIESREQUEST +_EXPORTENTITIESREQUEST.fields_by_name[ + "labels" +].message_type = _EXPORTENTITIESREQUEST_LABELSENTRY +_EXPORTENTITIESREQUEST.fields_by_name["entity_filter"].message_type = _ENTITYFILTER +_IMPORTENTITIESREQUEST_LABELSENTRY.containing_type = _IMPORTENTITIESREQUEST +_IMPORTENTITIESREQUEST.fields_by_name[ + "labels" +].message_type = _IMPORTENTITIESREQUEST_LABELSENTRY +_IMPORTENTITIESREQUEST.fields_by_name["entity_filter"].message_type = _ENTITYFILTER +_EXPORTENTITIESMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA +_EXPORTENTITIESMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS +_EXPORTENTITIESMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS +_EXPORTENTITIESMETADATA.fields_by_name["entity_filter"].message_type = _ENTITYFILTER +_IMPORTENTITIESMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA 
+_IMPORTENTITIESMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS +_IMPORTENTITIESMETADATA.fields_by_name["progress_bytes"].message_type = _PROGRESS +_IMPORTENTITIESMETADATA.fields_by_name["entity_filter"].message_type = _ENTITYFILTER +_LISTINDEXESRESPONSE.fields_by_name[ + "indexes" +].message_type = ( + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2._INDEX +) +_INDEXOPERATIONMETADATA.fields_by_name["common"].message_type = _COMMONMETADATA +_INDEXOPERATIONMETADATA.fields_by_name["progress_entities"].message_type = _PROGRESS +DESCRIPTOR.message_types_by_name["CommonMetadata"] = _COMMONMETADATA +DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS +DESCRIPTOR.message_types_by_name["ExportEntitiesRequest"] = _EXPORTENTITIESREQUEST +DESCRIPTOR.message_types_by_name["ImportEntitiesRequest"] = _IMPORTENTITIESREQUEST +DESCRIPTOR.message_types_by_name["ExportEntitiesResponse"] = _EXPORTENTITIESRESPONSE +DESCRIPTOR.message_types_by_name["ExportEntitiesMetadata"] = _EXPORTENTITIESMETADATA +DESCRIPTOR.message_types_by_name["ImportEntitiesMetadata"] = _IMPORTENTITIESMETADATA +DESCRIPTOR.message_types_by_name["EntityFilter"] = _ENTITYFILTER +DESCRIPTOR.message_types_by_name["GetIndexRequest"] = _GETINDEXREQUEST +DESCRIPTOR.message_types_by_name["ListIndexesRequest"] = _LISTINDEXESREQUEST +DESCRIPTOR.message_types_by_name["ListIndexesResponse"] = _LISTINDEXESRESPONSE +DESCRIPTOR.message_types_by_name["IndexOperationMetadata"] = _INDEXOPERATIONMETADATA +DESCRIPTOR.enum_types_by_name["OperationType"] = _OPERATIONTYPE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +CommonMetadata = _reflection.GeneratedProtocolMessageType( + "CommonMetadata", + (_message.Message,), + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + { + "DESCRIPTOR": _COMMONMETADATA_LABELSENTRY, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" + # 
@@protoc_insertion_point(class_scope:google.datastore.admin.v1.CommonMetadata.LabelsEntry) + }, + ), + "DESCRIPTOR": _COMMONMETADATA, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Metadata common to all Datastore Admin operations. + + Attributes: + start_time: + The time that work began on the operation. + end_time: + The time the operation ended, either successfully or + otherwise. + operation_type: + The type of the operation. Can be used as a filter in + ListOperationsRequest. + labels: + The client-assigned labels which were provided when the + operation was created. May also include additional labels. + state: + The current state of the Operation. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.CommonMetadata) + }, +) +_sym_db.RegisterMessage(CommonMetadata) +_sym_db.RegisterMessage(CommonMetadata.LabelsEntry) + +Progress = _reflection.GeneratedProtocolMessageType( + "Progress", + (_message.Message,), + { + "DESCRIPTOR": _PROGRESS, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Measures the progress of a particular metric. + + Attributes: + work_completed: + The amount of work that has been completed. Note that this may + be greater than work_estimated. + work_estimated: + An estimate of how much work needs to be performed. May be + zero if the work estimate is unavailable. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Progress) + }, +) +_sym_db.RegisterMessage(Progress) + +ExportEntitiesRequest = _reflection.GeneratedProtocolMessageType( + "ExportEntitiesRequest", + (_message.Message,), + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + { + "DESCRIPTOR": _EXPORTENTITIESREQUEST_LABELSENTRY, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry) + }, + ), + "DESCRIPTOR": _EXPORTENTITIESREQUEST, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ExportEntiti + es][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + + Attributes: + project_id: + Required. Project ID against which to make the request. + labels: + Client-assigned labels. + entity_filter: + Description of what data from the project is included in the + export. + output_url_prefix: + Required. Location for the export metadata and data files. + The full resource URL of the external storage location. + Currently, only Google Cloud Storage is supported. So + output_url_prefix should be of the form: + ``gs://BUCKET_NAME[/NAMESPACE_PATH]``, where ``BUCKET_NAME`` + is the name of the Cloud Storage bucket and ``NAMESPACE_PATH`` + is an optional Cloud Storage namespace path (this is not a + Cloud Datastore namespace). For more information about Cloud + Storage namespace paths, see `Object name considerations + `__. The resulting files will be nested deeper + than the specified URL prefix. The final output URL will be + provided in the [google.datastore.admin.v1.ExportEntitiesRespo + nse.output_url][google.datastore.admin.v1.ExportEntitiesRespon + se.output_url] field. That value should be used for subsequent + ImportEntities operations. 
By nesting the data files deeper, + the same Cloud Storage bucket can be used in multiple + ExportEntities operations without conflict. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesRequest) + }, +) +_sym_db.RegisterMessage(ExportEntitiesRequest) +_sym_db.RegisterMessage(ExportEntitiesRequest.LabelsEntry) + +ImportEntitiesRequest = _reflection.GeneratedProtocolMessageType( + "ImportEntitiesRequest", + (_message.Message,), + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + { + "DESCRIPTOR": _IMPORTENTITIESREQUEST_LABELSENTRY, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2" + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry) + }, + ), + "DESCRIPTOR": _IMPORTENTITIESREQUEST, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ImportEntiti + es][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. + + Attributes: + project_id: + Required. Project ID against which to make the request. + labels: + Client-assigned labels. + input_url: + Required. The full resource URL of the external storage + location. Currently, only Google Cloud Storage is supported. + So input_url should be of the form: ``gs://BUCKET_NAME[/NAMESP + ACE_PATH]/OVERALL_EXPORT_METADATA_FILE``, where + ``BUCKET_NAME`` is the name of the Cloud Storage bucket, + ``NAMESPACE_PATH`` is an optional Cloud Storage namespace path + (this is not a Cloud Datastore namespace), and + ``OVERALL_EXPORT_METADATA_FILE`` is the metadata file written + by the ExportEntities operation. For more information about + Cloud Storage namespace paths, see `Object name considerations + `__. For more information, see [google.datasto + re.admin.v1.ExportEntitiesResponse.output_url][google.datastor + e.admin.v1.ExportEntitiesResponse.output_url]. 
+ entity_filter: + Optionally specify which kinds/namespaces are to be imported. + If provided, the list must be a subset of the EntityFilter + used in creating the export, otherwise a FAILED_PRECONDITION + error will be returned. If no filter is specified then all + entities from the export are imported. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesRequest) + }, +) +_sym_db.RegisterMessage(ImportEntitiesRequest) +_sym_db.RegisterMessage(ImportEntitiesRequest.LabelsEntry) + +ExportEntitiesResponse = _reflection.GeneratedProtocolMessageType( + "ExportEntitiesResponse", + (_message.Message,), + { + "DESCRIPTOR": _EXPORTENTITIESRESPONSE, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The response for [google.datastore.admin.v1.DatastoreAdmin.ExportEntit + ies][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. + + Attributes: + output_url: + Location of the output metadata file. This can be used to + begin an import into Cloud Datastore (this project or another + project). See [google.datastore.admin.v1.ImportEntitiesRequest + .input_url][google.datastore.admin.v1.ImportEntitiesRequest.in + put_url]. Only present if the operation completed + successfully. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesResponse) + }, +) +_sym_db.RegisterMessage(ExportEntitiesResponse) + +ExportEntitiesMetadata = _reflection.GeneratedProtocolMessageType( + "ExportEntitiesMetadata", + (_message.Message,), + { + "DESCRIPTOR": _EXPORTENTITIESMETADATA, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Metadata for ExportEntities operations. + + Attributes: + common: + Metadata common to all Datastore Admin operations. + progress_entities: + An estimate of the number of entities processed. + progress_bytes: + An estimate of the number of bytes processed. 
+ entity_filter: + Description of which entities are being exported. + output_url_prefix: + Location for the export metadata and data files. This will be + the same value as the [google.datastore.admin.v1.ExportEntitie + sRequest.output_url_prefix][google.datastore.admin.v1.ExportEn + titiesRequest.output_url_prefix] field. The final output + location is provided in [google.datastore.admin.v1.ExportEntit + iesResponse.output_url][google.datastore.admin.v1.ExportEntiti + esResponse.output_url]. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ExportEntitiesMetadata) + }, +) +_sym_db.RegisterMessage(ExportEntitiesMetadata) + +ImportEntitiesMetadata = _reflection.GeneratedProtocolMessageType( + "ImportEntitiesMetadata", + (_message.Message,), + { + "DESCRIPTOR": _IMPORTENTITIESMETADATA, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Metadata for ImportEntities operations. + + Attributes: + common: + Metadata common to all Datastore Admin operations. + progress_entities: + An estimate of the number of entities processed. + progress_bytes: + An estimate of the number of bytes processed. + entity_filter: + Description of which entities are being imported. + input_url: + The location of the import metadata file. This will be the + same value as the [google.datastore.admin.v1.ExportEntitiesRes + ponse.output_url][google.datastore.admin.v1.ExportEntitiesResp + onse.output_url] field. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ImportEntitiesMetadata) + }, +) +_sym_db.RegisterMessage(ImportEntitiesMetadata) + +EntityFilter = _reflection.GeneratedProtocolMessageType( + "EntityFilter", + (_message.Message,), + { + "DESCRIPTOR": _ENTITYFILTER, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Identifies a subset of entities in a project. 
This is specified as + combinations of kinds and namespaces (either or both of which may be + all, as described in the following examples). Example usage: Entire + project: kinds=[], namespace_ids=[] Kinds Foo and Bar in all + namespaces: kinds=[‘Foo’, ‘Bar’], namespace_ids=[] Kinds Foo and Bar + only in the default namespace: kinds=[‘Foo’, ‘Bar’], + namespace_ids=[’’] Kinds Foo and Bar in both the default and Baz + namespaces: kinds=[‘Foo’, ‘Bar’], namespace_ids=[’‘, ’Baz’] The + entire Baz namespace: kinds=[], namespace_ids=[‘Baz’] + + Attributes: + kinds: + If empty, then this represents all kinds. + namespace_ids: + An empty list represents all namespaces. This is the preferred + usage for projects that don’t use namespaces. An empty string + element represents the default namespace. This should be used + if the project has data in non-default namespaces, but doesn’t + want to include them. Each namespace in this list must be + unique. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.EntityFilter) + }, +) +_sym_db.RegisterMessage(EntityFilter) + +GetIndexRequest = _reflection.GeneratedProtocolMessageType( + "GetIndexRequest", + (_message.Message,), + { + "DESCRIPTOR": _GETINDEXREQUEST, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.GetIndex][go + ogle.datastore.admin.v1.DatastoreAdmin.GetIndex]. + + Attributes: + project_id: + Project ID against which to make the request. + index_id: + The resource ID of the index to get. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.GetIndexRequest) + }, +) +_sym_db.RegisterMessage(GetIndexRequest) + +ListIndexesRequest = _reflection.GeneratedProtocolMessageType( + "ListIndexesRequest", + (_message.Message,), + { + "DESCRIPTOR": _LISTINDEXESREQUEST, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The request for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes] + [google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + Attributes: + project_id: + Project ID against which to make the request. + page_size: + The maximum number of items to return. If zero, then all + results will be returned. + page_token: + The next_page_token value returned from a previous List + request, if any. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ListIndexesRequest) + }, +) +_sym_db.RegisterMessage(ListIndexesRequest) + +ListIndexesResponse = _reflection.GeneratedProtocolMessageType( + "ListIndexesResponse", + (_message.Message,), + { + "DESCRIPTOR": _LISTINDEXESRESPONSE, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """The response for [google.datastore.admin.v1.DatastoreAdmin.ListIndexes + ][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. + + Attributes: + indexes: + The indexes. + next_page_token: + The standard List next-page token. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.ListIndexesResponse) + }, +) +_sym_db.RegisterMessage(ListIndexesResponse) + +IndexOperationMetadata = _reflection.GeneratedProtocolMessageType( + "IndexOperationMetadata", + (_message.Message,), + { + "DESCRIPTOR": _INDEXOPERATIONMETADATA, + "__module__": "google.cloud.datastore_admin_v1.proto.datastore_admin_pb2", + "__doc__": """Metadata for Index operations. + + Attributes: + common: + Metadata common to all Datastore Admin operations. 
+ progress_entities: + An estimate of the number of entities processed. + index_id: + The index resource ID that this operation is acting on. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.IndexOperationMetadata) + }, +) +_sym_db.RegisterMessage(IndexOperationMetadata) + + +DESCRIPTOR._options = None +_COMMONMETADATA_LABELSENTRY._options = None +_EXPORTENTITIESREQUEST_LABELSENTRY._options = None +_EXPORTENTITIESREQUEST.fields_by_name["project_id"]._options = None +_EXPORTENTITIESREQUEST.fields_by_name["output_url_prefix"]._options = None +_IMPORTENTITIESREQUEST_LABELSENTRY._options = None +_IMPORTENTITIESREQUEST.fields_by_name["project_id"]._options = None +_IMPORTENTITIESREQUEST.fields_by_name["input_url"]._options = None + +_DATASTOREADMIN = _descriptor.ServiceDescriptor( + name="DatastoreAdmin", + full_name="google.datastore.admin.v1.DatastoreAdmin", + file=DESCRIPTOR, + index=0, + serialized_options=b"\312A\030datastore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore", + create_key=_descriptor._internal_create_key, + serialized_start=2643, + serialized_end=3567, + methods=[ + _descriptor.MethodDescriptor( + name="ExportEntities", + full_name="google.datastore.admin.v1.DatastoreAdmin.ExportEntities", + index=0, + containing_service=None, + input_type=_EXPORTENTITIESREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=b'\202\323\344\223\002%" /v1/projects/{project_id}:export:\001*\332A1project_id,labels,entity_filter,output_url_prefix\312A0\n\026ExportEntitiesResponse\022\026ExportEntitiesMetadata', + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="ImportEntities", + full_name="google.datastore.admin.v1.DatastoreAdmin.ImportEntities", + index=1, + containing_service=None, + input_type=_IMPORTENTITIESREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + 
serialized_options=b'\202\323\344\223\002%" /v1/projects/{project_id}:import:\001*\332A)project_id,labels,input_url,entity_filter\312A/\n\025google.protobuf.Empty\022\026ImportEntitiesMetadata', + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="GetIndex", + full_name="google.datastore.admin.v1.DatastoreAdmin.GetIndex", + index=2, + containing_service=None, + input_type=_GETINDEXREQUEST, + output_type=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2._INDEX, + serialized_options=b"\202\323\344\223\002.\022,/v1/projects/{project_id}/indexes/{index_id}", + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="ListIndexes", + full_name="google.datastore.admin.v1.DatastoreAdmin.ListIndexes", + index=3, + containing_service=None, + input_type=_LISTINDEXESREQUEST, + output_type=_LISTINDEXESRESPONSE, + serialized_options=b"\202\323\344\223\002#\022!/v1/projects/{project_id}/indexes", + create_key=_descriptor._internal_create_key, + ), + ], +) +_sym_db.RegisterServiceDescriptor(_DATASTOREADMIN) + +DESCRIPTOR.services_by_name["DatastoreAdmin"] = _DATASTOREADMIN + +# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py b/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py new file mode 100644 index 00000000..177889e1 --- /dev/null +++ b/google/cloud/datastore_admin_v1/proto/datastore_admin_pb2_grpc.py @@ -0,0 +1,414 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from google.cloud.datastore_admin_v1.proto import ( + datastore_admin_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2, +) +from google.cloud.datastore_admin_v1.proto import ( + index_pb2 as google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +class DatastoreAdminStub(object): + """Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. + + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. + + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. 
+ An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. + + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ExportEntities = channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", + request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ImportEntities = channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", + request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetIndex = channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", + request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString, + ) + self.ListIndexes = channel.unary_unary( + "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", + request_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString, + ) + + +class DatastoreAdminServicer(object): + """Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. + + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. 
This is + specified as a combination of kinds and namespaces (either or both of which + may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. + + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. + + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. 
+ """ + + def ExportEntities(self, request, context): + """Exports a copy of all or a subset of entities from Google Cloud Datastore + to another storage system, such as Google Cloud Storage. Recent updates to + entities may not be reflected in the export. The export occurs in the + background and its progress can be monitored and managed via the + Operation resource that is created. The output of an export may only be + used once the associated operation is done. If an export operation is + cancelled before completion it may leave partial data behind in Google + Cloud Storage. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ImportEntities(self, request, context): + """Imports entities into Google Cloud Datastore. Existing entities with the + same key are overwritten. The import occurs in the background and its + progress can be monitored and managed via the Operation resource that is + created. If an ImportEntities operation is cancelled, it is possible + that a subset of the data has already been imported to Cloud Datastore. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetIndex(self, request, context): + """Gets an index. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListIndexes(self, request, context): + """Lists the indexes that match the specified filters. Datastore uses an + eventually consistent query to fetch the list of indexes and may + occasionally return stale results. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_DatastoreAdminServicer_to_server(servicer, server): + rpc_method_handlers = { + "ExportEntities": grpc.unary_unary_rpc_method_handler( + servicer.ExportEntities, + request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ImportEntities": grpc.unary_unary_rpc_method_handler( + servicer.ImportEntities, + request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetIndex": grpc.unary_unary_rpc_method_handler( + servicer.GetIndex, + request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.SerializeToString, + ), + "ListIndexes": grpc.unary_unary_rpc_method_handler( + servicer.ListIndexes, + request_deserializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.datastore.admin.v1.DatastoreAdmin", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. +class DatastoreAdmin(object): + """Google Cloud Datastore Admin API + + + The Datastore Admin API provides several admin services for Cloud Datastore. 
+ + ----------------------------------------------------------------------------- + ## Concepts + + Project, namespace, kind, and entity as defined in the Google Cloud Datastore + API. + + Operation: An Operation represents work being performed in the background. + + EntityFilter: Allows specifying a subset of entities in a project. This is + specified as a combination of kinds and namespaces (either or both of which + may be all). + + ----------------------------------------------------------------------------- + ## Services + + # Export/Import + + The Export/Import service provides the ability to copy all or a subset of + entities to/from Google Cloud Storage. + + Exported data may be imported into Cloud Datastore for any Google Cloud + Platform project. It is not restricted to the export source project. It is + possible to export from one project and then import into another. + + Exported data can also be loaded into Google BigQuery for analysis. + + Exports and imports are performed asynchronously. An Operation resource is + created for each export/import. The state (including any errors encountered) + of the export/import may be queried via the Operation resource. + + # Index + + The index service manages Cloud Datastore composite indexes. + + Index creation and deletion are performed asynchronously. + An Operation resource is created for each such asynchronous operation. + The state of the operation (including any errors encountered) + may be queried via the Operation resource. + + # Operation + + The Operations collection provides a record of actions performed for the + specified project (including any operations in progress). Operations are not + created directly but through calls on other collections or resources. + + An operation that is not yet done may be cancelled. The request to cancel is + asynchronous and the operation may continue to run for some time after the + request to cancel is made. 
+ + An operation that is done may be deleted so that it is no longer listed as + part of the Operation collection. + + ListOperations returns all pending operations, but not completed operations. + + Operations are created by service DatastoreAdmin, + but are accessed via service google.longrunning.Operations. + """ + + @staticmethod + def ExportEntities( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ExportEntitiesRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ImportEntities( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ImportEntitiesRequest.SerializeToString, + google_dot_longrunning_dot_operations__pb2.Operation.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetIndex( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", + 
google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.GetIndexRequest.SerializeToString, + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_index__pb2.Index.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ListIndexes( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesRequest.SerializeToString, + google_dot_cloud_dot_datastore__admin__v1_dot_proto_dot_datastore__admin__pb2.ListIndexesResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/google/cloud/datastore_admin_v1/proto/index.proto b/google/cloud/datastore_admin_v1/proto/index.proto new file mode 100644 index 00000000..96c2278b --- /dev/null +++ b/google/cloud/datastore_admin_v1/proto/index.proto @@ -0,0 +1,115 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.datastore.admin.v1; + +import "google/api/field_behavior.proto"; +import "google/api/annotations.proto"; + +option csharp_namespace = "Google.Cloud.Datastore.Admin.V1"; +option go_package = "google.golang.org/genproto/googleapis/datastore/admin/v1;admin"; +option java_multiple_files = true; +option java_outer_classname = "IndexProto"; +option java_package = "com.google.datastore.admin.v1"; +option ruby_package = "Google::Cloud::Datastore::Admin::V1"; + +// A minimal index definition. +message Index { + // A property of an index. + message IndexedProperty { + // Required. The property name to index. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The indexed property's direction. Must not be DIRECTION_UNSPECIFIED. + Direction direction = 2 [(google.api.field_behavior) = REQUIRED]; + } + + // For an ordered index, specifies whether each of the entity's ancestors + // will be included. + enum AncestorMode { + // The ancestor mode is unspecified. + ANCESTOR_MODE_UNSPECIFIED = 0; + + // Do not include the entity's ancestors in the index. + NONE = 1; + + // Include all the entity's ancestors in the index. + ALL_ANCESTORS = 2; + } + + // The direction determines how a property is indexed. + enum Direction { + // The direction is unspecified. + DIRECTION_UNSPECIFIED = 0; + + // The property's values are indexed so as to support sequencing in + // ascending order and also query by <, >, <=, >=, and =. + ASCENDING = 1; + + // The property's values are indexed so as to support sequencing in + // descending order and also query by <, >, <=, >=, and =. + DESCENDING = 2; + } + + // The possible set of states of an index. + enum State { + // The state is unspecified. + STATE_UNSPECIFIED = 0; + + // The index is being created, and cannot be used by queries. + // There is an active long-running operation for the index. + // The index is updated when writing an entity. + // Some index data may exist. 
+    CREATING = 1;
+
+    // The index is ready to be used.
+    // The index is updated when writing an entity.
+    // The index is fully populated from all stored entities it applies to.
+    READY = 2;
+
+    // The index is being deleted, and cannot be used by queries.
+    // There is an active long-running operation for the index.
+    // The index is not updated when writing an entity.
+    // Some index data may exist.
+    DELETING = 3;
+
+    // The index was being created or deleted, but something went wrong.
+    // The index cannot be used by queries.
+    // There is no active long-running operation for the index,
+    // and the most recently finished long-running operation failed.
+    // The index is not updated when writing an entity.
+    // Some index data may exist.
+    ERROR = 4;
+  }
+
+  // Output only. Project ID.
+  string project_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // Output only. The resource ID of the index.
+  string index_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // Required. The entity kind to which this index applies.
+  string kind = 4 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED.
+  AncestorMode ancestor = 5 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. An ordered sequence of property names and their index attributes.
+  repeated IndexedProperty properties = 6 [(google.api.field_behavior) = REQUIRED];
+
+  // Output only. The state of the index.
+  State state = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
diff --git a/google/cloud/datastore_admin_v1/proto/index_pb2.py b/google/cloud/datastore_admin_v1/proto/index_pb2.py
new file mode 100644
index 00000000..c1ccb034
--- /dev/null
+++ b/google/cloud/datastore_admin_v1/proto/index_pb2.py
@@ -0,0 +1,430 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/datastore_admin_v1/proto/index.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/datastore_admin_v1/proto/index.proto", + package="google.datastore.admin.v1", + syntax="proto3", + serialized_options=b"\n\035com.google.datastore.admin.v1B\nIndexProtoP\001Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\252\002\037Google.Cloud.Datastore.Admin.V1\352\002#Google::Cloud::Datastore::Admin::V1", + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n1google/cloud/datastore_admin_v1/proto/index.proto\x12\x19google.datastore.admin.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xe6\x04\n\x05Index\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x15\n\x08index_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04kind\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x44\n\x08\x61ncestor\x18\x05 \x01(\x0e\x32-.google.datastore.admin.v1.Index.AncestorModeB\x03\xe0\x41\x02\x12I\n\nproperties\x18\x06 \x03(\x0b\x32\x30.google.datastore.admin.v1.Index.IndexedPropertyB\x03\xe0\x41\x02\x12:\n\x05state\x18\x07 \x01(\x0e\x32&.google.datastore.admin.v1.Index.StateB\x03\xe0\x41\x03\x1ah\n\x0fIndexedProperty\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x42\n\tdirection\x18\x02 
\x01(\x0e\x32*.google.datastore.admin.v1.Index.DirectionB\x03\xe0\x41\x02"J\n\x0c\x41ncestorMode\x12\x1d\n\x19\x41NCESTOR_MODE_UNSPECIFIED\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x11\n\rALL_ANCESTORS\x10\x02"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"P\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08\x44\x45LETING\x10\x03\x12\t\n\x05\x45RROR\x10\x04\x42\xb5\x01\n\x1d\x63om.google.datastore.admin.v1B\nIndexProtoP\x01Z>google.golang.org/genproto/googleapis/datastore/admin/v1;admin\xaa\x02\x1fGoogle.Cloud.Datastore.Admin.V1\xea\x02#Google::Cloud::Datastore::Admin::V1b\x06proto3', + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], +) + + +_INDEX_ANCESTORMODE = _descriptor.EnumDescriptor( + name="AncestorMode", + full_name="google.datastore.admin.v1.Index.AncestorMode", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="ANCESTOR_MODE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="NONE", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="ALL_ANCESTORS", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=531, + serialized_end=605, +) +_sym_db.RegisterEnumDescriptor(_INDEX_ANCESTORMODE) + +_INDEX_DIRECTION = _descriptor.EnumDescriptor( + name="Direction", + full_name="google.datastore.admin.v1.Index.Direction", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + 
_descriptor.EnumValueDescriptor( + name="DIRECTION_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="ASCENDING", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="DESCENDING", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=607, + serialized_end=676, +) +_sym_db.RegisterEnumDescriptor(_INDEX_DIRECTION) + +_INDEX_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.datastore.admin.v1.Index.State", + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="CREATING", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="READY", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="DELETING", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.EnumValueDescriptor( + name="ERROR", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=678, + serialized_end=758, +) +_sym_db.RegisterEnumDescriptor(_INDEX_STATE) + + +_INDEX_INDEXEDPROPERTY = _descriptor.Descriptor( + name="IndexedProperty", + 
full_name="google.datastore.admin.v1.Index.IndexedProperty", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.datastore.admin.v1.Index.IndexedProperty.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="direction", + full_name="google.datastore.admin.v1.Index.IndexedProperty.direction", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=425, + serialized_end=529, +) + +_INDEX = _descriptor.Descriptor( + name="Index", + full_name="google.datastore.admin.v1.Index", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.datastore.admin.v1.Index.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\003", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="index_id", 
+ full_name="google.datastore.admin.v1.Index.index_id", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\003", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="kind", + full_name="google.datastore.admin.v1.Index.kind", + index=2, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="ancestor", + full_name="google.datastore.admin.v1.Index.ancestor", + index=3, + number=5, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="properties", + full_name="google.datastore.admin.v1.Index.properties", + index=4, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\340A\002", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.datastore.admin.v1.Index.state", + index=5, + number=7, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, 
+ serialized_options=b"\340A\003", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[_INDEX_INDEXEDPROPERTY,], + enum_types=[_INDEX_ANCESTORMODE, _INDEX_DIRECTION, _INDEX_STATE,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=144, + serialized_end=758, +) + +_INDEX_INDEXEDPROPERTY.fields_by_name["direction"].enum_type = _INDEX_DIRECTION +_INDEX_INDEXEDPROPERTY.containing_type = _INDEX +_INDEX.fields_by_name["ancestor"].enum_type = _INDEX_ANCESTORMODE +_INDEX.fields_by_name["properties"].message_type = _INDEX_INDEXEDPROPERTY +_INDEX.fields_by_name["state"].enum_type = _INDEX_STATE +_INDEX_ANCESTORMODE.containing_type = _INDEX +_INDEX_DIRECTION.containing_type = _INDEX +_INDEX_STATE.containing_type = _INDEX +DESCRIPTOR.message_types_by_name["Index"] = _INDEX +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Index = _reflection.GeneratedProtocolMessageType( + "Index", + (_message.Message,), + { + "IndexedProperty": _reflection.GeneratedProtocolMessageType( + "IndexedProperty", + (_message.Message,), + { + "DESCRIPTOR": _INDEX_INDEXEDPROPERTY, + "__module__": "google.cloud.datastore_admin_v1.proto.index_pb2", + "__doc__": """A property of an index. + + Attributes: + name: + Required. The property name to index. + direction: + Required. The indexed property’s direction. Must not be + DIRECTION_UNSPECIFIED. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Index.IndexedProperty) + }, + ), + "DESCRIPTOR": _INDEX, + "__module__": "google.cloud.datastore_admin_v1.proto.index_pb2", + "__doc__": """A minimal index definition. + + Attributes: + project_id: + Output only. Project ID. + index_id: + Output only. The resource ID of the index. + kind: + Required. The entity kind to which this index applies. + ancestor: + Required. The index’s ancestor mode. Must not be + ANCESTOR_MODE_UNSPECIFIED. + properties: + Required. 
An ordered sequence of property names and their + index attributes. + state: + Output only. The state of the index. + """, + # @@protoc_insertion_point(class_scope:google.datastore.admin.v1.Index) + }, +) +_sym_db.RegisterMessage(Index) +_sym_db.RegisterMessage(Index.IndexedProperty) + + +DESCRIPTOR._options = None +_INDEX_INDEXEDPROPERTY.fields_by_name["name"]._options = None +_INDEX_INDEXEDPROPERTY.fields_by_name["direction"]._options = None +_INDEX.fields_by_name["project_id"]._options = None +_INDEX.fields_by_name["index_id"]._options = None +_INDEX.fields_by_name["kind"]._options = None +_INDEX.fields_by_name["ancestor"]._options = None +_INDEX.fields_by_name["properties"]._options = None +_INDEX.fields_by_name["state"]._options = None +# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py b/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py new file mode 100644 index 00000000..8a939394 --- /dev/null +++ b/google/cloud/datastore_admin_v1/proto/index_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc diff --git a/google/cloud/datastore_v1/gapic/datastore_client.py b/google/cloud/datastore_v1/gapic/datastore_client.py index 12958c41..5f9b530f 100644 --- a/google/cloud/datastore_v1/gapic/datastore_client.py +++ b/google/cloud/datastore_v1/gapic/datastore_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -39,7 +39,7 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-datastore" + "google-cloud-datastore", ).version @@ -167,12 +167,12 @@ def __init__( self.transport = transport else: self.transport = datastore_grpc_transport.DatastoreGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -183,7 +183,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -219,8 +219,8 @@ def lookup( >>> response = client.lookup(project_id, keys) Args: - project_id (str): The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Keys of entities to look up. + project_id (str): Required. The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. Keys of entities to look up. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Key` @@ -259,7 +259,7 @@ def lookup( ) request = datastore_pb2.LookupRequest( - project_id=project_id, keys=keys, read_options=read_options + project_id=project_id, keys=keys, read_options=read_options, ) if metadata is None: metadata = [] @@ -281,7 +281,7 @@ def lookup( def run_query( self, project_id, - partition_id, + partition_id=None, read_options=None, query=None, gql_query=None, @@ -300,13 +300,10 @@ def run_query( >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize `partition_id`: - >>> partition_id = {} - >>> - >>> response = client.run_query(project_id, partition_id) + >>> response = client.run_query(project_id) Args: - project_id (str): The ID of the project against which to make the request. + project_id (str): Required. The ID of the project against which to make the request. partition_id (Union[dict, ~google.cloud.datastore_v1.types.PartitionId]): Entities are partitioned into subsets, identified by a partition ID. Queries are scoped to a single partition. This partition ID is normalized with the standard default context @@ -358,7 +355,9 @@ def run_query( # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(query=query, gql_query=gql_query) + google.api_core.protobuf_helpers.check_oneof( + query=query, gql_query=gql_query, + ) request = datastore_pb2.RunQueryRequest( project_id=project_id, @@ -384,6 +383,90 @@ def run_query( request, retry=retry, timeout=timeout, metadata=metadata ) + def reserve_ids( + self, + project_id, + keys, + database_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. 
+ + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `keys`: + >>> keys = [] + >>> + >>> response = client.reserve_ids(project_id, keys) + + Args: + project_id (str): Required. The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. A list of keys with complete key paths whose numeric IDs should not be + auto-allocated. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Key` + database_id (str): If not empty, the ID of the database against which to make the request. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datastore_v1.types.ReserveIdsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "reserve_ids" not in self._inner_api_calls: + self._inner_api_calls[ + "reserve_ids" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.reserve_ids, + default_retry=self._method_configs["ReserveIds"].retry, + default_timeout=self._method_configs["ReserveIds"].timeout, + client_info=self._client_info, + ) + + request = datastore_pb2.ReserveIdsRequest( + project_id=project_id, keys=keys, database_id=database_id, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["reserve_ids"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def begin_transaction( self, project_id, @@ -406,7 +489,7 @@ def begin_transaction( >>> response = client.begin_transaction(project_id) Args: - project_id (str): The ID of the project against which to make the request. + project_id (str): Required. The ID of the project against which to make the request. transaction_options (Union[dict, ~google.cloud.datastore_v1.types.TransactionOptions]): Options for a new transaction. 
If a dict is provided, it must be of the same form as the protobuf @@ -442,7 +525,7 @@ def begin_transaction( ) request = datastore_pb2.BeginTransactionRequest( - project_id=project_id, transaction_options=transaction_options + project_id=project_id, transaction_options=transaction_options, ) if metadata is None: metadata = [] @@ -464,8 +547,8 @@ def begin_transaction( def commit( self, project_id, - mode, - mutations, + mode=None, + mutations=None, transaction=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, @@ -477,24 +560,20 @@ def commit( Example: >>> from google.cloud import datastore_v1 - >>> from google.cloud.datastore_v1 import enums >>> >>> client = datastore_v1.DatastoreClient() >>> >>> # TODO: Initialize `project_id`: >>> project_id = '' >>> - >>> # TODO: Initialize `mode`: - >>> mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED - >>> - >>> # TODO: Initialize `mutations`: - >>> mutations = [] - >>> - >>> response = client.commit(project_id, mode, mutations) + >>> response = client.commit(project_id) Args: - project_id (str): The ID of the project against which to make the request. + project_id (str): Required. The ID of the project against which to make the request. mode (~google.cloud.datastore_v1.types.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``. + transaction (bytes): The identifier of the transaction associated with the commit. A + transaction identifier is returned by a call to + ``Datastore.BeginTransaction``. mutations (list[Union[dict, ~google.cloud.datastore_v1.types.Mutation]]): The mutations to perform. When mode is ``TRANSACTIONAL``, mutations affecting a single entity are @@ -511,9 +590,6 @@ def commit( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datastore_v1.types.Mutation` - transaction (bytes): The identifier of the transaction associated with the commit. 
A - transaction identifier is returned by a call to - ``Datastore.BeginTransaction``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -546,13 +622,13 @@ def commit( # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(transaction=transaction) + google.api_core.protobuf_helpers.check_oneof(transaction=transaction,) request = datastore_pb2.CommitRequest( project_id=project_id, mode=mode, - mutations=mutations, transaction=transaction, + mutations=mutations, ) if metadata is None: metadata = [] @@ -596,8 +672,8 @@ def rollback( >>> response = client.rollback(project_id, transaction) Args: - project_id (str): The ID of the project against which to make the request. - transaction (bytes): The transaction identifier, returned by a call to + project_id (str): Required. The ID of the project against which to make the request. + transaction (bytes): Required. The transaction identifier, returned by a call to ``Datastore.BeginTransaction``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -630,7 +706,7 @@ def rollback( ) request = datastore_pb2.RollbackRequest( - project_id=project_id, transaction=transaction + project_id=project_id, transaction=transaction, ) if metadata is None: metadata = [] @@ -675,8 +751,8 @@ def allocate_ids( >>> response = client.allocate_ids(project_id, keys) Args: - project_id (str): The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with incomplete key paths for which to allocate IDs. + project_id (str): Required. The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Required. 
A list of keys with incomplete key paths for which to allocate IDs. No key may be reserved/read-only. If a dict is provided, it must be of the same form as the protobuf @@ -711,7 +787,7 @@ def allocate_ids( client_info=self._client_info, ) - request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys) + request = datastore_pb2.AllocateIdsRequest(project_id=project_id, keys=keys,) if metadata is None: metadata = [] metadata = list(metadata) @@ -728,87 +804,3 @@ def allocate_ids( return self._inner_api_calls["allocate_ids"]( request, retry=retry, timeout=timeout, metadata=metadata ) - - def reserve_ids( - self, - project_id, - keys, - database_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - - Example: - >>> from google.cloud import datastore_v1 - >>> - >>> client = datastore_v1.DatastoreClient() - >>> - >>> # TODO: Initialize `project_id`: - >>> project_id = '' - >>> - >>> # TODO: Initialize `keys`: - >>> keys = [] - >>> - >>> response = client.reserve_ids(project_id, keys) - - Args: - project_id (str): The ID of the project against which to make the request. - keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with complete key paths whose numeric IDs should not be - auto-allocated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datastore_v1.types.Key` - database_id (str): If not empty, the ID of the database against which to make the request. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datastore_v1.types.ReserveIdsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "reserve_ids" not in self._inner_api_calls: - self._inner_api_calls[ - "reserve_ids" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.reserve_ids, - default_retry=self._method_configs["ReserveIds"].retry, - default_timeout=self._method_configs["ReserveIds"].timeout, - client_info=self._client_info, - ) - - request = datastore_pb2.ReserveIdsRequest( - project_id=project_id, keys=keys, database_id=database_id - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project_id", project_id)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["reserve_ids"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/datastore_v1/gapic/datastore_client_config.py b/google/cloud/datastore_v1/gapic/datastore_client_config.py index 95822b8b..5346b3ce 100644 --- a/google/cloud/datastore_v1/gapic/datastore_client_config.py +++ b/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -27,6 +27,11 @@ "retry_codes_name": "idempotent", "retry_params_name": "default", }, + "ReserveIds": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, "BeginTransaction": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", @@ -47,11 +52,6 @@ "retry_codes_name": 
"non_idempotent", "retry_params_name": "default", }, - "ReserveIds": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, }, } } diff --git a/google/cloud/datastore_v1/gapic/enums.py b/google/cloud/datastore_v1/gapic/enums.py index b56d0fd3..f84538a3 100644 --- a/google/cloud/datastore_v1/gapic/enums.py +++ b/google/cloud/datastore_v1/gapic/enums.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,8 +21,8 @@ class NullValue(enum.IntEnum): """ - ``NullValue`` is a singleton enumeration to represent the null value for - the ``Value`` type union. + ``NullValue`` is a singleton enumeration to represent the null value + for the ``Value`` type union. The JSON representation for ``NullValue`` is JSON ``null``. diff --git a/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py b/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py index c7c640c4..74552d8a 100644 --- a/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py +++ b/google/cloud/datastore_v1/gapic/transports/datastore_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -56,7 +56,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. 
@@ -74,7 +74,9 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. - self._stubs = {"datastore_stub": datastore_pb2_grpc.DatastoreStub(channel)} + self._stubs = { + "datastore_stub": datastore_pb2_grpc.DatastoreStub(channel), + } @classmethod def create_channel( @@ -134,6 +136,20 @@ def run_query(self): """ return self._stubs["datastore_stub"].RunQuery + @property + def reserve_ids(self): + """Return the gRPC stub for :meth:`DatastoreClient.reserve_ids`. + + Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["datastore_stub"].ReserveIds + @property def begin_transaction(self): """Return the gRPC stub for :meth:`DatastoreClient.begin_transaction`. @@ -187,17 +203,3 @@ def allocate_ids(self): deserialized response object. """ return self._stubs["datastore_stub"].AllocateIds - - @property - def reserve_ids(self): - """Return the gRPC stub for :meth:`DatastoreClient.reserve_ids`. - - Prevents the supplied keys' IDs from being auto-allocated by Cloud - Datastore. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["datastore_stub"].ReserveIds diff --git a/google/cloud/datastore_v1/proto/datastore.proto b/google/cloud/datastore_v1/proto/datastore.proto index 51d69acb..ad016194 100644 --- a/google/cloud/datastore_v1/proto/datastore.proto +++ b/google/cloud/datastore_v1/proto/datastore.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -17,6 +17,8 @@ syntax = "proto3"; package google.datastore.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/datastore/v1/entity.proto"; import "google/datastore/v1/query.proto"; @@ -26,6 +28,7 @@ option java_multiple_files = true; option java_outer_classname = "DatastoreProto"; option java_package = "com.google.datastore.v1"; option php_namespace = "Google\\Cloud\\Datastore\\V1"; +option ruby_package = "Google::Cloud::Datastore::V1"; // Each RPC normalizes the partition IDs of the keys in its input entities, // and always returns entities with keys with normalized partition IDs. @@ -35,12 +38,18 @@ option php_namespace = "Google\\Cloud\\Datastore\\V1"; // the request. // service Datastore { + option (google.api.default_host) = "datastore.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/datastore"; + // Looks up entities by key. rpc Lookup(LookupRequest) returns (LookupResponse) { option (google.api.http) = { post: "/v1/projects/{project_id}:lookup" body: "*" }; + option (google.api.method_signature) = "project_id,read_options,keys"; } // Queries for entities. @@ -52,12 +61,12 @@ service Datastore { } // Begins a new transaction. - rpc BeginTransaction(BeginTransactionRequest) - returns (BeginTransactionResponse) { + rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { option (google.api.http) = { post: "/v1/projects/{project_id}:beginTransaction" body: "*" }; + option (google.api.method_signature) = "project_id"; } // Commits a transaction, optionally creating, deleting or modifying some @@ -67,6 +76,8 @@ service Datastore { post: "/v1/projects/{project_id}:commit" body: "*" }; + option (google.api.method_signature) = "project_id,mode,transaction,mutations"; + option (google.api.method_signature) = "project_id,mode,mutations"; } // Rolls back a transaction. 
@@ -75,6 +86,7 @@ service Datastore { post: "/v1/projects/{project_id}:rollback" body: "*" }; + option (google.api.method_signature) = "project_id,transaction"; } // Allocates IDs for the given keys, which is useful for referencing an entity @@ -84,6 +96,7 @@ service Datastore { post: "/v1/projects/{project_id}:allocateIds" body: "*" }; + option (google.api.method_signature) = "project_id,keys"; } // Prevents the supplied keys' IDs from being auto-allocated by Cloud @@ -93,19 +106,20 @@ service Datastore { post: "/v1/projects/{project_id}:reserveIds" body: "*" }; + option (google.api.method_signature) = "project_id,keys"; } } // The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. message LookupRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // The options for this lookup request. ReadOptions read_options = 1; - // Keys of entities to look up. - repeated Key keys = 3; + // Required. Keys of entities to look up. + repeated Key keys = 3 [(google.api.field_behavior) = REQUIRED]; } // The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. @@ -128,8 +142,8 @@ message LookupResponse { // The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. message RunQueryRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // Entities are partitioned into subsets, identified by a partition ID. // Queries are scoped to a single partition. @@ -150,8 +164,7 @@ message RunQueryRequest { } } -// The response for -// [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. +// The response for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. 
message RunQueryResponse { // A batch of query results (always present). QueryResultBatch batch = 1; @@ -160,18 +173,16 @@ message RunQueryResponse { Query query = 2; } -// The request for -// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +// The request for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. message BeginTransactionRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // Options for a new transaction. TransactionOptions transaction_options = 10; } -// The response for -// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +// The response for [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. message BeginTransactionResponse { // The transaction identifier (always present). bytes transaction = 1; @@ -179,18 +190,19 @@ message BeginTransactionResponse { // The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. message RollbackRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - // The transaction identifier, returned by a call to + // Required. The transaction identifier, returned by a call to // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. - bytes transaction = 1; + bytes transaction = 1 [(google.api.field_behavior) = REQUIRED]; } -// The response for -// [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. (an empty -// message). -message RollbackResponse {} +// The response for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. +// (an empty message). 
+message RollbackResponse { + +} // The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. message CommitRequest { @@ -208,8 +220,8 @@ message CommitRequest { NON_TRANSACTIONAL = 2; } - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // The type of commit to perform. Defaults to `TRANSACTIONAL`. Mode mode = 5; @@ -249,42 +261,40 @@ message CommitResponse { int32 index_updates = 4; } -// The request for -// [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +// The request for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. message AllocateIdsRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; - // A list of keys with incomplete key paths for which to allocate IDs. + // Required. A list of keys with incomplete key paths for which to allocate IDs. // No key may be reserved/read-only. - repeated Key keys = 1; + repeated Key keys = 1 [(google.api.field_behavior) = REQUIRED]; } -// The response for -// [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +// The response for [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. message AllocateIdsResponse { // The keys specified in the request (in the same order), each with // its key path completed with a newly allocated ID. repeated Key keys = 1; } -// The request for -// [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +// The request for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. message ReserveIdsRequest { - // The ID of the project against which to make the request. - string project_id = 8; + // Required. 
The ID of the project against which to make the request. + string project_id = 8 [(google.api.field_behavior) = REQUIRED]; // If not empty, the ID of the database against which to make the request. string database_id = 9; - // A list of keys with complete key paths whose numeric IDs should not be + // Required. A list of keys with complete key paths whose numeric IDs should not be // auto-allocated. - repeated Key keys = 1; + repeated Key keys = 1 [(google.api.field_behavior) = REQUIRED]; } -// The response for -// [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. -message ReserveIdsResponse {} +// The response for [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +message ReserveIdsResponse { + +} // A mutation to apply to an entity. message Mutation { @@ -374,10 +384,8 @@ message ReadOptions { // Options for beginning a new transaction. // // Transactions can be created explicitly with calls to -// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] -// or implicitly by setting -// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] -// in read requests. +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] or implicitly by setting +// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] in read requests. message TransactionOptions { // Options specific to read / write transactions. message ReadWrite { @@ -386,7 +394,9 @@ message TransactionOptions { } // Options specific to read-only transactions. - message ReadOnly {} + message ReadOnly { + + } // The `mode` of the transaction, indicating whether write operations are // supported. 
diff --git a/google/cloud/datastore_v1/proto/datastore_pb2.py b/google/cloud/datastore_v1/proto/datastore_pb2.py index c62dea63..cf7a3cfd 100644 --- a/google/cloud/datastore_v1/proto/datastore_pb2.py +++ b/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -1,10 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/datastore_v1/proto/datastore.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -16,6 +13,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.datastore_v1.proto import ( entity_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_entity__pb2, ) @@ -28,14 +27,13 @@ name="google/cloud/datastore_v1/proto/datastore.proto", package="google.datastore.v1", syntax="proto3", - serialized_options=_b( - "\n\027com.google.datastore.v1B\016DatastoreProtoP\001Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 
\x01(\x0b\x32\x1a.google.datastore.v1.Value"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x9d\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 
\x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 
\x01(\x03"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\xbc\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z= 0 if specified. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) - ), + }, ) _sym_db.RegisterMessage(Query) KindExpression = _reflection.GeneratedProtocolMessageType( "KindExpression", (_message.Message,), - dict( - DESCRIPTOR=_KINDEXPRESSION, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A representation of a kind. - + { + "DESCRIPTOR": _KINDEXPRESSION, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A representation of a kind. Attributes: name: The name of the kind. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) - ), + }, ) _sym_db.RegisterMessage(KindExpression) PropertyReference = _reflection.GeneratedProtocolMessageType( "PropertyReference", (_message.Message,), - dict( - DESCRIPTOR=_PROPERTYREFERENCE, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A reference to a property relative to the kind expressions. - + { + "DESCRIPTOR": _PROPERTYREFERENCE, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A reference to a property relative to the kind expressions. Attributes: name: - The name of the property. If name includes "."s, it may be + The name of the property. If name includes “.”s, it may be interpreted as a property name path. 
""", # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) - ), + }, ) _sym_db.RegisterMessage(PropertyReference) Projection = _reflection.GeneratedProtocolMessageType( "Projection", (_message.Message,), - dict( - DESCRIPTOR=_PROJECTION, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A representation of a property in a projection. - + { + "DESCRIPTOR": _PROJECTION, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A representation of a property in a projection. Attributes: property: The property to project. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) - ), + }, ) _sym_db.RegisterMessage(Projection) PropertyOrder = _reflection.GeneratedProtocolMessageType( "PropertyOrder", (_message.Message,), - dict( - DESCRIPTOR=_PROPERTYORDER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""The desired order for a specific property. - + { + "DESCRIPTOR": _PROPERTYORDER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """The desired order for a specific property. Attributes: property: @@ -1437,18 +1549,17 @@ The direction to order by. Defaults to ``ASCENDING``. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) - ), + }, ) _sym_db.RegisterMessage(PropertyOrder) Filter = _reflection.GeneratedProtocolMessageType( "Filter", (_message.Message,), - dict( - DESCRIPTOR=_FILTER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A holder for any type of filter. - + { + "DESCRIPTOR": _FILTER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A holder for any type of filter. Attributes: filter_type: @@ -1459,18 +1570,17 @@ A filter on a property. 
""", # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) - ), + }, ) _sym_db.RegisterMessage(Filter) CompositeFilter = _reflection.GeneratedProtocolMessageType( "CompositeFilter", (_message.Message,), - dict( - DESCRIPTOR=_COMPOSITEFILTER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A filter that merges multiple other filters using the given operator. - + { + "DESCRIPTOR": _COMPOSITEFILTER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A filter that merges multiple other filters using the given operator. Attributes: op: @@ -1480,18 +1590,17 @@ filter. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) - ), + }, ) _sym_db.RegisterMessage(CompositeFilter) PropertyFilter = _reflection.GeneratedProtocolMessageType( "PropertyFilter", (_message.Message,), - dict( - DESCRIPTOR=_PROPERTYFILTER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A filter on a specific property. - + { + "DESCRIPTOR": _PROPERTYFILTER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A filter on a specific property. Attributes: property: @@ -1502,28 +1611,27 @@ The value to compare the property to. 
""", # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) - ), + }, ) _sym_db.RegisterMessage(PropertyFilter) GqlQuery = _reflection.GeneratedProtocolMessageType( "GqlQuery", (_message.Message,), - dict( - NamedBindingsEntry=_reflection.GeneratedProtocolMessageType( + { + "NamedBindingsEntry": _reflection.GeneratedProtocolMessageType( "NamedBindingsEntry", (_message.Message,), - dict( - DESCRIPTOR=_GQLQUERY_NAMEDBINDINGSENTRY, - __module__="google.cloud.datastore_v1.proto.query_pb2" + { + "DESCRIPTOR": _GQLQUERY_NAMEDBINDINGSENTRY, + "__module__": "google.cloud.datastore_v1.proto.query_pb2" # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) - ), + }, ), - DESCRIPTOR=_GQLQUERY, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A `GQL - query `__. - + "DESCRIPTOR": _GQLQUERY, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A `GQL query + `__. Attributes: query_string: @@ -1548,7 +1656,7 @@ The inverse must also be true. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) - ), + }, ) _sym_db.RegisterMessage(GqlQuery) _sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) @@ -1556,11 +1664,10 @@ GqlQueryParameter = _reflection.GeneratedProtocolMessageType( "GqlQueryParameter", (_message.Message,), - dict( - DESCRIPTOR=_GQLQUERYPARAMETER, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A binding parameter for a GQL query. - + { + "DESCRIPTOR": _GQLQUERYPARAMETER, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A binding parameter for a GQL query. Attributes: parameter_type: @@ -1572,18 +1679,17 @@ batches. 
""", # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) - ), + }, ) _sym_db.RegisterMessage(GqlQueryParameter) QueryResultBatch = _reflection.GeneratedProtocolMessageType( "QueryResultBatch", (_message.Message,), - dict( - DESCRIPTOR=_QUERYRESULTBATCH, - __module__="google.cloud.datastore_v1.proto.query_pb2", - __doc__="""A batch of results produced by a query. - + { + "DESCRIPTOR": _QUERYRESULTBATCH, + "__module__": "google.cloud.datastore_v1.proto.query_pb2", + "__doc__": """A batch of results produced by a query. Attributes: skipped_results: @@ -1602,17 +1708,17 @@ The state of the query after the current batch. snapshot_version: The version number of the snapshot this batch was returned - from. This applies to the range of results from the query's + from. This applies to the range of results from the query’s ``start_cursor`` (or the beginning of the query if no cursor - was given) to this batch's ``end_cursor`` (not the query's + was given) to this batch’s ``end_cursor`` (not the query’s ``end_cursor``). In a single transaction, subsequent query result batches for the same query can have a greater snapshot - version number. Each batch's snapshot version is valid for all + version number. Each batch’s snapshot version is valid for all preceding batches. The value will be zero for eventually consistent queries. """, # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) - ), + }, ) _sym_db.RegisterMessage(QueryResultBatch) diff --git a/google/cloud/datastore_v1/proto/query_pb2_grpc.py b/google/cloud/datastore_v1/proto/query_pb2_grpc.py index 07cb78fe..8a939394 100644 --- a/google/cloud/datastore_v1/proto/query_pb2_grpc.py +++ b/google/cloud/datastore_v1/proto/query_pb2_grpc.py @@ -1,2 +1,3 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" import grpc diff --git a/noxfile.py b/noxfile.py index 1e43b20e..187124ab 100644 --- a/noxfile.py +++ b/noxfile.py @@ -23,14 +23,15 @@ import nox -BLACK_VERSION = "black==19.3b0" +BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -if os.path.exists("samples"): - BLACK_PATHS.append("samples") +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. @@ -38,7 +39,9 @@ def lint(session): serious code quality issues. """ session.install("flake8", BLACK_VERSION) - session.run("black", "--check", *BLACK_PATHS) + session.run( + "black", "--check", *BLACK_PATHS, + ) session.run("flake8", "google", "tests") @@ -53,10 +56,12 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run( + "black", *BLACK_PATHS, + ) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -84,13 +89,13 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["2.7", "3.7"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") @@ -110,7 +115,9 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
- session.install("mock", "pytest", "google-cloud-testutils") + session.install( + "mock", "pytest", "google-cloud-testutils", + ) session.install("-e", ".") # Run py.test against the system tests. @@ -120,7 +127,7 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. @@ -133,7 +140,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh new file mode 100755 index 00000000..ff599eb2 --- /dev/null +++ b/scripts/decrypt-secrets.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + > testing/client-secrets.json \ No newline at end of file diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py new file mode 100644 index 00000000..d309d6e9 --- /dev/null +++ b/scripts/readme-gen/readme_gen.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 00000000..4fd23976 --- /dev/null +++ b/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. 
_{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 00000000..1446b94a
--- /dev/null
+++ b/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+    https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 00000000..11957ce2
--- /dev/null
+++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+    https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 00000000..a0406dba
--- /dev/null
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+    .. code-block:: bash
+
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them.
You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 00000000..5ea33d18 --- /dev/null +++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. 
_Homebrew: http://brew.sh diff --git a/synth.metadata b/synth.metadata index 58b5cedc..865b99bc 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,32 +1,25 @@ { "sources": [ - { - "generator": { - "name": "artman", - "version": "2.0.0", - "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098" - } - }, { "git": { "name": ".", "remote": "git@github.com:googleapis/python-datastore", - "sha": "f9c0937f24f3a9874db6a8710e260c6ce2907069" + "sha": "f822b98873c829d4ae01d3de1b0d58e0076948fd" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "756b174de4a122461993c1c583345533d819936d", - "internalRef": "308824110" + "sha": "5202cfe3e5c2907a1a21a4c6d4bd0812029b6aa3", + "internalRef": "319247865" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "01b6f23d24b27878b48667ce597876d66b59780e" + "sha": "303271797a360f8a439203413f13a160f2f5b3b4" } } ], @@ -37,8 +30,16 @@ "apiName": "datastore", "apiVersion": "v1", "language": "python", - "generator": "gapic", - "config": "google/datastore/artman_datastore.yaml" + "generator": "bazel" + } + }, + { + "client": { + "source": "googleapis", + "apiName": "datastore_admin", + "apiVersion": "v1", + "language": "python", + "generator": "bazel" } } ] diff --git a/synth.py b/synth.py index 49e9f694..36b5150a 100644 --- a/synth.py +++ b/synth.py @@ -45,10 +45,39 @@ s.move(library / "google/cloud/datastore_admin_v1/proto") s.move(library / "google/cloud/datastore_admin_v1/gapic") +# TODO(busunkim): Remove during the microgenerator transition. +# This re-orders the parameters to avoid breaking existing code. 
+num = s.replace( +"google/**/datastore_client.py", +"""def commit\( +\s+self, +\s+project_id, +\s+mode=None, +\s+transaction=None, +\s+mutations=None, +\s+retry=google\.api_core\.gapic_v1\.method\.DEFAULT, +\s+timeout=google\.api_core\.gapic_v1\.method\.DEFAULT, +\s+metadata=None\):""", +"""def commit( + self, + project_id, + mode=None, + mutations=None, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ):""" +) + +if num != 1: + raise Exception("Required replacement not made.") # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=97, cov_level=99) -s.move(templated_files, excludes=["docs/conf.py"]) +s.move(templated_files, excludes=["docs/conf.py", "docs/multiprocessing.rst"]) + +s.replace("noxfile.py", """["']sphinx['"]""", '''"sphinx<3.0.0"''') s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/.gitignore b/testing/.gitignore new file mode 100644 index 00000000..b05fbd63 --- /dev/null +++ b/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 5a7448fc..6a30089c 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -572,7 +572,7 @@ def test_get_multi_hit(self): client._datastore_api_internal = ds_api key = Key(kind, id_, project=self.PROJECT) - result, = client.get_multi([key]) + (result,) = client.get_multi([key]) new_key = result.key # Check the returned value is as expected. 
@@ -609,7 +609,7 @@ def test_get_multi_hit_w_transaction(self): key = Key(kind, id_, project=self.PROJECT) txn = client.transaction() txn._id = txn_id - result, = client.get_multi([key], transaction=txn) + (result,) = client.get_multi([key], transaction=txn) new_key = result.key # Check the returned value is as expected. diff --git a/tests/unit/test_key.py b/tests/unit/test_key.py index a157d50b..0478e2cb 100644 --- a/tests/unit/test_key.py +++ b/tests/unit/test_key.py @@ -358,7 +358,7 @@ def test_to_protobuf_defaults(self): self.assertEqual(pb.partition_id.namespace_id, "") # Check the element PB matches the partial key and kind. - elem, = list(pb.path) + (elem,) = list(pb.path) self.assertEqual(elem.kind, _KIND) # Unset values are False-y. self.assertEqual(elem.name, "")