From 483cc6e90eff74e746adcb2e5ea67decc64aa217 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 17 Aug 2021 10:00:45 -0600 Subject: [PATCH] feat: Added the Backup resource and Backup resource GetIamPolicy/SetIamPolicy to V1 feat: Added the RestoreService method to V1 (#63) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit feat: Added the Backup resource and Backup resource GetIamPolicy/SetIamPolicy to V1 feat: Added the RestoreService method to V1 Promoted additional Dataproc Metastore metadata management methods to V1 PiperOrigin-RevId: 391108398 Source-Link: https://github.com/googleapis/googleapis/commit/b6901e755abebb55b1907eb1c073ab95d5c5c749 Source-Link: https://github.com/googleapis/googleapis-gen/commit/020e77c452db4e0f4f77f6d7613c18eead9b8918 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- google/cloud/metastore/__init__.py | 16 + google/cloud/metastore_v1/__init__.py | 16 + google/cloud/metastore_v1/gapic_metadata.json | 50 + .../dataproc_metastore/async_client.py | 467 +++++- .../services/dataproc_metastore/client.py | 481 ++++++- .../services/dataproc_metastore/pagers.py | 128 ++ .../dataproc_metastore/transports/base.py | 60 + .../dataproc_metastore/transports/grpc.py | 139 +- .../transports/grpc_asyncio.py | 145 +- google/cloud/metastore_v1/types/__init__.py | 16 + google/cloud/metastore_v1/types/metastore.py | 310 +++- scripts/fixup_metastore_v1_keywords.py | 5 + .../metastore_v1/test_dataproc_metastore.py | 1262 ++++++++++++++++- 13 files changed, 3042 insertions(+), 53 deletions(-) diff --git a/google/cloud/metastore/__init__.py b/google/cloud/metastore/__init__.py index 79d5194..f986278 100644 --- a/google/cloud/metastore/__init__.py +++ b/google/cloud/metastore/__init__.py @@ -21,15 +21,21 @@ DataprocMetastoreAsyncClient, ) +from 
google.cloud.metastore_v1.types.metastore import Backup +from google.cloud.metastore_v1.types.metastore import CreateBackupRequest from google.cloud.metastore_v1.types.metastore import CreateMetadataImportRequest from google.cloud.metastore_v1.types.metastore import CreateServiceRequest from google.cloud.metastore_v1.types.metastore import DatabaseDumpSpec +from google.cloud.metastore_v1.types.metastore import DeleteBackupRequest from google.cloud.metastore_v1.types.metastore import DeleteServiceRequest from google.cloud.metastore_v1.types.metastore import ExportMetadataRequest +from google.cloud.metastore_v1.types.metastore import GetBackupRequest from google.cloud.metastore_v1.types.metastore import GetMetadataImportRequest from google.cloud.metastore_v1.types.metastore import GetServiceRequest from google.cloud.metastore_v1.types.metastore import HiveMetastoreConfig from google.cloud.metastore_v1.types.metastore import KerberosConfig +from google.cloud.metastore_v1.types.metastore import ListBackupsRequest +from google.cloud.metastore_v1.types.metastore import ListBackupsResponse from google.cloud.metastore_v1.types.metastore import ListMetadataImportsRequest from google.cloud.metastore_v1.types.metastore import ListMetadataImportsResponse from google.cloud.metastore_v1.types.metastore import ListServicesRequest @@ -40,6 +46,8 @@ from google.cloud.metastore_v1.types.metastore import MetadataImport from google.cloud.metastore_v1.types.metastore import MetadataManagementActivity from google.cloud.metastore_v1.types.metastore import OperationMetadata +from google.cloud.metastore_v1.types.metastore import Restore +from google.cloud.metastore_v1.types.metastore import RestoreServiceRequest from google.cloud.metastore_v1.types.metastore import Secret from google.cloud.metastore_v1.types.metastore import Service from google.cloud.metastore_v1.types.metastore import UpdateMetadataImportRequest @@ -48,15 +56,21 @@ __all__ = ( "DataprocMetastoreClient", 
"DataprocMetastoreAsyncClient", + "Backup", + "CreateBackupRequest", "CreateMetadataImportRequest", "CreateServiceRequest", "DatabaseDumpSpec", + "DeleteBackupRequest", "DeleteServiceRequest", "ExportMetadataRequest", + "GetBackupRequest", "GetMetadataImportRequest", "GetServiceRequest", "HiveMetastoreConfig", "KerberosConfig", + "ListBackupsRequest", + "ListBackupsResponse", "ListMetadataImportsRequest", "ListMetadataImportsResponse", "ListServicesRequest", @@ -67,6 +81,8 @@ "MetadataImport", "MetadataManagementActivity", "OperationMetadata", + "Restore", + "RestoreServiceRequest", "Secret", "Service", "UpdateMetadataImportRequest", diff --git a/google/cloud/metastore_v1/__init__.py b/google/cloud/metastore_v1/__init__.py index 4410753..5fa9010 100644 --- a/google/cloud/metastore_v1/__init__.py +++ b/google/cloud/metastore_v1/__init__.py @@ -17,15 +17,21 @@ from .services.dataproc_metastore import DataprocMetastoreClient from .services.dataproc_metastore import DataprocMetastoreAsyncClient +from .types.metastore import Backup +from .types.metastore import CreateBackupRequest from .types.metastore import CreateMetadataImportRequest from .types.metastore import CreateServiceRequest from .types.metastore import DatabaseDumpSpec +from .types.metastore import DeleteBackupRequest from .types.metastore import DeleteServiceRequest from .types.metastore import ExportMetadataRequest +from .types.metastore import GetBackupRequest from .types.metastore import GetMetadataImportRequest from .types.metastore import GetServiceRequest from .types.metastore import HiveMetastoreConfig from .types.metastore import KerberosConfig +from .types.metastore import ListBackupsRequest +from .types.metastore import ListBackupsResponse from .types.metastore import ListMetadataImportsRequest from .types.metastore import ListMetadataImportsResponse from .types.metastore import ListServicesRequest @@ -36,6 +42,8 @@ from .types.metastore import MetadataImport from .types.metastore import 
MetadataManagementActivity from .types.metastore import OperationMetadata +from .types.metastore import Restore +from .types.metastore import RestoreServiceRequest from .types.metastore import Secret from .types.metastore import Service from .types.metastore import UpdateMetadataImportRequest @@ -43,16 +51,22 @@ __all__ = ( "DataprocMetastoreAsyncClient", + "Backup", + "CreateBackupRequest", "CreateMetadataImportRequest", "CreateServiceRequest", "DatabaseDumpSpec", "DataprocMetastoreClient", + "DeleteBackupRequest", "DeleteServiceRequest", "ExportMetadataRequest", + "GetBackupRequest", "GetMetadataImportRequest", "GetServiceRequest", "HiveMetastoreConfig", "KerberosConfig", + "ListBackupsRequest", + "ListBackupsResponse", "ListMetadataImportsRequest", "ListMetadataImportsResponse", "ListServicesRequest", @@ -63,6 +77,8 @@ "MetadataImport", "MetadataManagementActivity", "OperationMetadata", + "Restore", + "RestoreServiceRequest", "Secret", "Service", "UpdateMetadataImportRequest", diff --git a/google/cloud/metastore_v1/gapic_metadata.json b/google/cloud/metastore_v1/gapic_metadata.json index dd2f060..6995396 100644 --- a/google/cloud/metastore_v1/gapic_metadata.json +++ b/google/cloud/metastore_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "DataprocMetastoreClient", "rpcs": { + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, "CreateMetadataImport": { "methods": [ "create_metadata_import" @@ -20,6 +25,11 @@ "create_service" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, "DeleteService": { "methods": [ "delete_service" @@ -30,6 +40,11 @@ "export_metadata" ] }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, "GetMetadataImport": { "methods": [ "get_metadata_import" @@ -40,6 +55,11 @@ "get_service" ] }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, "ListMetadataImports": { "methods": [ "list_metadata_imports" @@ -50,6 +70,11 @@ "list_services" ] }, + "RestoreService": { + "methods": [ + 
"restore_service" + ] + }, "UpdateMetadataImport": { "methods": [ "update_metadata_import" @@ -65,6 +90,11 @@ "grpc-async": { "libraryClient": "DataprocMetastoreAsyncClient", "rpcs": { + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, "CreateMetadataImport": { "methods": [ "create_metadata_import" @@ -75,6 +105,11 @@ "create_service" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, "DeleteService": { "methods": [ "delete_service" @@ -85,6 +120,11 @@ "export_metadata" ] }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, "GetMetadataImport": { "methods": [ "get_metadata_import" @@ -95,6 +135,11 @@ "get_service" ] }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, "ListMetadataImports": { "methods": [ "list_metadata_imports" @@ -105,6 +150,11 @@ "list_services" ] }, + "RestoreService": { + "methods": [ + "restore_service" + ] + }, "UpdateMetadataImport": { "methods": [ "update_metadata_import" diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py b/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py index 2deed26..53eaa2d 100644 --- a/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py +++ b/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py @@ -40,12 +40,11 @@ class DataprocMetastoreAsyncClient: """Configures and manages metastore services. Metastore services are - fully managed, highly available, auto-scaled, auto-healing, - OSS-native deployments of technical metadata management software. - Each metastore service exposes a network endpoint through which - metadata queries are served. Metadata queries can originate from a - variety of sources, including Apache Hive, Apache Presto, and Apache - Spark. + fully managed, highly available, autoscaled, autohealing, OSS-native + deployments of technical metadata management software. Each + metastore service exposes a network endpoint through which metadata + queries are served. 
Metadata queries can originate from a variety of + sources, including Apache Hive, Apache Presto, and Apache Spark. The Dataproc Metastore API defines the following resource model: @@ -69,6 +68,8 @@ class DataprocMetastoreAsyncClient: DEFAULT_ENDPOINT = DataprocMetastoreClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DataprocMetastoreClient.DEFAULT_MTLS_ENDPOINT + backup_path = staticmethod(DataprocMetastoreClient.backup_path) + parse_backup_path = staticmethod(DataprocMetastoreClient.parse_backup_path) metadata_import_path = staticmethod(DataprocMetastoreClient.metadata_import_path) parse_metadata_import_path = staticmethod( DataprocMetastoreClient.parse_metadata_import_path @@ -1092,6 +1093,460 @@ async def export_metadata( # Done; return the response. return response + async def restore_service( + self, + request: metastore.RestoreServiceRequest = None, + *, + service: str = None, + backup: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Restores a service from a backup. + + Args: + request (:class:`google.cloud.metastore_v1.types.RestoreServiceRequest`): + The request object. Request message for + [DataprocMetastore.Restore][]. + service (:class:`str`): + Required. The relative resource name of the metastore + service to run restore, in the following form: + + ``projects/{project_id}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`str`): + Required. The relative resource name of the metastore + service backup to restore from, in the following form: + + ``projects/{project_id}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.Restore` The + details of a metadata restore operation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service, backup]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.RestoreServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service is not None: + request.service = service + if backup is not None: + request.backup = backup + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restore_service, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("service", request.service),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + metastore.Restore, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_backups( + self, + request: metastore.ListBackupsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists backups in a service. + + Args: + request (:class:`google.cloud.metastore_v1.types.ListBackupsRequest`): + The request object. Request message for + [DataprocMetastore.ListBackups][google.cloud.metastore.v1.DataprocMetastore.ListBackups]. + parent (:class:`str`): + Required. The relative resource name of the service + whose backups to list, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.services.dataproc_metastore.pagers.ListBackupsAsyncPager: + Response message for + [DataprocMetastore.ListBackups][google.cloud.metastore.v1.DataprocMetastore.ListBackups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backups, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup( + self, + request: metastore.GetBackupRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Backup: + r"""Gets details of a single backup. + + Args: + request (:class:`google.cloud.metastore_v1.types.GetBackupRequest`): + The request object. Request message for + [DataprocMetastore.GetBackup][google.cloud.metastore.v1.DataprocMetastore.GetBackup]. + name (:class:`str`): + Required. The relative resource name of the backup to + retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.metastore_v1.types.Backup: + The details of a backup resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_backup( + self, + request: metastore.CreateBackupRequest = None, + *, + parent: str = None, + backup: metastore.Backup = None, + backup_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new backup in a given project and location. + + Args: + request (:class:`google.cloud.metastore_v1.types.CreateBackupRequest`): + The request object. Request message for + [DataprocMetastore.CreateBackup][google.cloud.metastore.v1.DataprocMetastore.CreateBackup]. + parent (:class:`str`): + Required. 
The relative resource name of the service in + which to create a backup of the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`google.cloud.metastore_v1.types.Backup`): + Required. The backup to create. The ``name`` field is + ignored. The ID of the created backup must be provided + in the request's ``backup_id`` field. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. The ID of the backup, which + is used as the final component of the + backup's name. + This value must be between 1 and 64 + characters long, begin with a letter, + end with a letter or number, and consist + of alpha-numeric ASCII characters or + hyphens. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.Backup` The + details of a backup resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = metastore.CreateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_backup, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + metastore.Backup, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_backup( + self, + request: metastore.DeleteBackupRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single backup. + + Args: + request (:class:`google.cloud.metastore_v1.types.DeleteBackupRequest`): + The request object. Request message for + [DataprocMetastore.DeleteBackup][google.cloud.metastore.v1.DataprocMetastore.DeleteBackup]. + name (:class:`str`): + Required. The relative resource name of the backup to + delete, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_backup, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/client.py b/google/cloud/metastore_v1/services/dataproc_metastore/client.py index 4412113..2dca481 100644 --- a/google/cloud/metastore_v1/services/dataproc_metastore/client.py +++ b/google/cloud/metastore_v1/services/dataproc_metastore/client.py @@ -79,12 +79,11 @@ def get_transport_class( class DataprocMetastoreClient(metaclass=DataprocMetastoreClientMeta): """Configures and manages metastore services. Metastore services are - fully managed, highly available, auto-scaled, auto-healing, - OSS-native deployments of technical metadata management software. - Each metastore service exposes a network endpoint through which - metadata queries are served. Metadata queries can originate from a - variety of sources, including Apache Hive, Apache Presto, and Apache - Spark. + fully managed, highly available, autoscaled, autohealing, OSS-native + deployments of technical metadata management software. Each + metastore service exposes a network endpoint through which metadata + queries are served. Metadata queries can originate from a variety of + sources, including Apache Hive, Apache Presto, and Apache Spark. 
The Dataproc Metastore API defines the following resource model: @@ -185,6 +184,22 @@ def transport(self) -> DataprocMetastoreTransport: """ return self._transport + @staticmethod + def backup_path(project: str, location: str, service: str, backup: str,) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/services/{service}/backups/{backup}".format( + project=project, location=location, service=service, backup=backup, + ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/services/(?P<service>.+?)/backups/(?P<backup>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def metadata_import_path( project: str, location: str, service: str, metadata_import: str, @@ -1314,6 +1329,460 @@ def export_metadata( # Done; return the response. return response + def restore_service( + self, + request: metastore.RestoreServiceRequest = None, + *, + service: str = None, + backup: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restores a service from a backup. + + Args: + request (google.cloud.metastore_v1.types.RestoreServiceRequest): + The request object. Request message for + [DataprocMetastore.Restore][]. + service (str): + Required. The relative resource name of the metastore + service to run restore, in the following form: + + ``projects/{project_id}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (str): + Required. The relative resource name of the metastore + service backup to restore from, in the following form: + + ``projects/{project_id}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. 
+ + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.Restore` The + details of a metadata restore operation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service, backup]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.RestoreServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.RestoreServiceRequest): + request = metastore.RestoreServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service is not None: + request.service = service + if backup is not None: + request.backup = backup + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("service", request.service),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + metastore.Restore, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_backups( + self, + request: metastore.ListBackupsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists backups in a service. + + Args: + request (google.cloud.metastore_v1.types.ListBackupsRequest): + The request object. Request message for + [DataprocMetastore.ListBackups][google.cloud.metastore.v1.DataprocMetastore.ListBackups]. + parent (str): + Required. The relative resource name of the service + whose backups to list, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.services.dataproc_metastore.pagers.ListBackupsPager: + Response message for + [DataprocMetastore.ListBackups][google.cloud.metastore.v1.DataprocMetastore.ListBackups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListBackupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListBackupsRequest): + request = metastore.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup( + self, + request: metastore.GetBackupRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Backup: + r"""Gets details of a single backup. + + Args: + request (google.cloud.metastore_v1.types.GetBackupRequest): + The request object. Request message for + [DataprocMetastore.GetBackup][google.cloud.metastore.v1.DataprocMetastore.GetBackup]. + name (str): + Required. 
The relative resource name of the backup to + retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.types.Backup: + The details of a backup resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetBackupRequest): + request = metastore.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def create_backup( + self, + request: metastore.CreateBackupRequest = None, + *, + parent: str = None, + backup: metastore.Backup = None, + backup_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new backup in a given project and location. + + Args: + request (google.cloud.metastore_v1.types.CreateBackupRequest): + The request object. Request message for + [DataprocMetastore.CreateBackup][google.cloud.metastore.v1.DataprocMetastore.CreateBackup]. + parent (str): + Required. The relative resource name of the service in + which to create a backup of the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (google.cloud.metastore_v1.types.Backup): + Required. The backup to create. The ``name`` field is + ignored. The ID of the created backup must be provided + in the request's ``backup_id`` field. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (str): + Required. The ID of the backup, which + is used as the final component of the + backup's name. + This value must be between 1 and 64 + characters long, begin with a letter, + end with a letter or number, and consist + of alpha-numeric ASCII characters or + hyphens. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.Backup` The + details of a backup resource. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateBackupRequest): + request = metastore.CreateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + metastore.Backup, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_backup( + self, + request: metastore.DeleteBackupRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single backup. + + Args: + request (google.cloud.metastore_v1.types.DeleteBackupRequest): + The request object. Request message for + [DataprocMetastore.DeleteBackup][google.cloud.metastore.v1.DataprocMetastore.DeleteBackup]. + name (str): + Required. The relative resource name of the backup to + delete, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.DeleteBackupRequest): + request = metastore.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. 
+ return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/pagers.py b/google/cloud/metastore_v1/services/dataproc_metastore/pagers.py index d144ef3..3db75b4 100644 --- a/google/cloud/metastore_v1/services/dataproc_metastore/pagers.py +++ b/google/cloud/metastore_v1/services/dataproc_metastore/pagers.py @@ -281,3 +281,131 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.metastore_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.metastore_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListBackupsResponse], + request: metastore.ListBackupsRequest, + response: metastore.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.metastore_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.metastore_v1.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metastore.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metastore.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[metastore.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.metastore_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.metastore_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListBackupsResponse]], + request: metastore.ListBackupsRequest, + response: metastore.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.metastore_v1.types.ListBackupsRequest): + The initial request object. 
+ response (google.cloud.metastore_v1.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metastore.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[metastore.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/transports/base.py b/google/cloud/metastore_v1/services/dataproc_metastore/transports/base.py index c7ee6ca..ddd34dd 100644 --- a/google/cloud/metastore_v1/services/dataproc_metastore/transports/base.py +++ b/google/cloud/metastore_v1/services/dataproc_metastore/transports/base.py @@ -190,6 +190,21 @@ def _prep_wrapped_messages(self, client_info): self.export_metadata: gapic_v1.method.wrap_method( self.export_metadata, default_timeout=60.0, client_info=client_info, ), + self.restore_service: gapic_v1.method.wrap_method( + self.restore_service, default_timeout=60.0, client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, default_timeout=None, client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, default_timeout=None, client_info=client_info, + ), + self.create_backup: 
gapic_v1.method.wrap_method( + self.create_backup, default_timeout=60.0, client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, default_timeout=60.0, client_info=client_info, + ), } @property @@ -292,5 +307,50 @@ def export_metadata( ]: raise NotImplementedError() + @property + def restore_service( + self, + ) -> Callable[ + [metastore.RestoreServiceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [metastore.ListBackupsRequest], + Union[metastore.ListBackupsResponse, Awaitable[metastore.ListBackupsResponse]], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [metastore.GetBackupRequest], + Union[metastore.Backup, Awaitable[metastore.Backup]], + ]: + raise NotImplementedError() + + @property + def create_backup( + self, + ) -> Callable[ + [metastore.CreateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [metastore.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + __all__ = ("DataprocMetastoreTransport",) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc.py b/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc.py index 08f2204..a21685c 100644 --- a/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc.py +++ b/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc.py @@ -34,12 +34,11 @@ class DataprocMetastoreGrpcTransport(DataprocMetastoreTransport): """gRPC backend transport for DataprocMetastore. Configures and manages metastore services. 
Metastore services are - fully managed, highly available, auto-scaled, auto-healing, - OSS-native deployments of technical metadata management software. - Each metastore service exposes a network endpoint through which - metadata queries are served. Metadata queries can originate from a - variety of sources, including Apache Hive, Apache Presto, and Apache - Spark. + fully managed, highly available, autoscaled, autohealing, OSS-native + deployments of technical metadata management software. Each + metastore service exposes a network endpoint through which metadata + queries are served. Metadata queries can originate from a variety of + sources, including Apache Hive, Apache Presto, and Apache Spark. The Dataproc Metastore API defines the following resource model: @@ -528,5 +527,133 @@ def export_metadata( ) return self._stubs["export_metadata"] + @property + def restore_service( + self, + ) -> Callable[[metastore.RestoreServiceRequest], operations_pb2.Operation]: + r"""Return a callable for the restore service method over gRPC. + + Restores a service from a backup. + + Returns: + Callable[[~.RestoreServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_service" not in self._stubs: + self._stubs["restore_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/RestoreService", + request_serializer=metastore.RestoreServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_service"] + + @property + def list_backups( + self, + ) -> Callable[[metastore.ListBackupsRequest], metastore.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists backups in a service. 
+ + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/ListBackups", + request_serializer=metastore.ListBackupsRequest.serialize, + response_deserializer=metastore.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup(self) -> Callable[[metastore.GetBackupRequest], metastore.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a single backup. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/GetBackup", + request_serializer=metastore.GetBackupRequest.serialize, + response_deserializer=metastore.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def create_backup( + self, + ) -> Callable[[metastore.CreateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup method over gRPC. + + Creates a new backup in a given project and location. + + Returns: + Callable[[~.CreateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/CreateBackup", + request_serializer=metastore.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[metastore.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a single backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/DeleteBackup", + request_serializer=metastore.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + __all__ = ("DataprocMetastoreGrpcTransport",) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc_asyncio.py b/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc_asyncio.py index e87b398..b5a31f2 100644 --- a/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc_asyncio.py +++ b/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc_asyncio.py @@ -36,12 +36,11 @@ class DataprocMetastoreGrpcAsyncIOTransport(DataprocMetastoreTransport): """gRPC AsyncIO backend transport for DataprocMetastore. Configures and manages metastore services. 
Metastore services are - fully managed, highly available, auto-scaled, auto-healing, - OSS-native deployments of technical metadata management software. - Each metastore service exposes a network endpoint through which - metadata queries are served. Metadata queries can originate from a - variety of sources, including Apache Hive, Apache Presto, and Apache - Spark. + fully managed, highly available, autoscaled, autohealing, OSS-native + deployments of technical metadata management software. Each + metastore service exposes a network endpoint through which metadata + queries are served. Metadata queries can originate from a variety of + sources, including Apache Hive, Apache Presto, and Apache Spark. The Dataproc Metastore API defines the following resource model: @@ -552,5 +551,139 @@ def export_metadata( ) return self._stubs["export_metadata"] + @property + def restore_service( + self, + ) -> Callable[ + [metastore.RestoreServiceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restore service method over gRPC. + + Restores a service from a backup. + + Returns: + Callable[[~.RestoreServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_service" not in self._stubs: + self._stubs["restore_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/RestoreService", + request_serializer=metastore.RestoreServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_service"] + + @property + def list_backups( + self, + ) -> Callable[ + [metastore.ListBackupsRequest], Awaitable[metastore.ListBackupsResponse] + ]: + r"""Return a callable for the list backups method over gRPC. 
+ + Lists backups in a service. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/ListBackups", + request_serializer=metastore.ListBackupsRequest.serialize, + response_deserializer=metastore.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[metastore.GetBackupRequest], Awaitable[metastore.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a single backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/GetBackup", + request_serializer=metastore.GetBackupRequest.serialize, + response_deserializer=metastore.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def create_backup( + self, + ) -> Callable[[metastore.CreateBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create backup method over gRPC. + + Creates a new backup in a given project and location. 
+ + Returns: + Callable[[~.CreateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup" not in self._stubs: + self._stubs["create_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/CreateBackup", + request_serializer=metastore.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[metastore.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a single backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/DeleteBackup", + request_serializer=metastore.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + __all__ = ("DataprocMetastoreGrpcAsyncIOTransport",) diff --git a/google/cloud/metastore_v1/types/__init__.py b/google/cloud/metastore_v1/types/__init__.py index 7ce234e..78c53ea 100644 --- a/google/cloud/metastore_v1/types/__init__.py +++ b/google/cloud/metastore_v1/types/__init__.py @@ -14,15 +14,21 @@ # limitations under the License. 
# from .metastore import ( + Backup, + CreateBackupRequest, CreateMetadataImportRequest, CreateServiceRequest, DatabaseDumpSpec, + DeleteBackupRequest, DeleteServiceRequest, ExportMetadataRequest, + GetBackupRequest, GetMetadataImportRequest, GetServiceRequest, HiveMetastoreConfig, KerberosConfig, + ListBackupsRequest, + ListBackupsResponse, ListMetadataImportsRequest, ListMetadataImportsResponse, ListServicesRequest, @@ -33,6 +39,8 @@ MetadataImport, MetadataManagementActivity, OperationMetadata, + Restore, + RestoreServiceRequest, Secret, Service, UpdateMetadataImportRequest, @@ -40,15 +48,21 @@ ) __all__ = ( + "Backup", + "CreateBackupRequest", "CreateMetadataImportRequest", "CreateServiceRequest", "DatabaseDumpSpec", + "DeleteBackupRequest", "DeleteServiceRequest", "ExportMetadataRequest", + "GetBackupRequest", "GetMetadataImportRequest", "GetServiceRequest", "HiveMetastoreConfig", "KerberosConfig", + "ListBackupsRequest", + "ListBackupsResponse", "ListMetadataImportsRequest", "ListMetadataImportsResponse", "ListServicesRequest", @@ -59,6 +73,8 @@ "MetadataImport", "MetadataManagementActivity", "OperationMetadata", + "Restore", + "RestoreServiceRequest", "Secret", "Service", "UpdateMetadataImportRequest", diff --git a/google/cloud/metastore_v1/types/metastore.py b/google/cloud/metastore_v1/types/metastore.py index fdc6a16..084fdd5 100644 --- a/google/cloud/metastore_v1/types/metastore.py +++ b/google/cloud/metastore_v1/types/metastore.py @@ -32,6 +32,8 @@ "MetadataManagementActivity", "MetadataImport", "MetadataExport", + "Backup", + "Restore", "ListServicesRequest", "ListServicesResponse", "GetServiceRequest", @@ -43,7 +45,13 @@ "GetMetadataImportRequest", "CreateMetadataImportRequest", "UpdateMetadataImportRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "CreateBackupRequest", + "DeleteBackupRequest", "ExportMetadataRequest", + "RestoreServiceRequest", "OperationMetadata", "LocationMetadata", "DatabaseDumpSpec", @@ -251,11 
+259,15 @@ class MetadataManagementActivity(proto.Message): metadata_exports (Sequence[google.cloud.metastore_v1.types.MetadataExport]): Output only. The latest metadata exports of the metastore service. + restores (Sequence[google.cloud.metastore_v1.types.Restore]): + Output only. The latest restores of the + metastore service. """ metadata_exports = proto.RepeatedField( proto.MESSAGE, number=1, message="MetadataExport", ) + restores = proto.RepeatedField(proto.MESSAGE, number=2, message="Restore",) class MetadataImport(proto.Message): @@ -273,10 +285,13 @@ class MetadataImport(proto.Message): The description of the metadata import. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the metadata - import was created. + import was started. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the metadata import was last updated. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata + import finished. state (google.cloud.metastore_v1.types.MetadataImport.State): Output only. The current state of the metadata import. @@ -324,6 +339,7 @@ class DatabaseType(proto.Enum): description = proto.Field(proto.STRING, number=2,) create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=5, enum=State,) @@ -363,6 +379,96 @@ class State(proto.Enum): ) +class Backup(proto.Message): + r"""The details of a backup resource. + Attributes: + name (str): + Immutable. The relative resource name of the backup, in the + following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the backup was + started. 
+ end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the backup + finished creating. + state (google.cloud.metastore_v1.types.Backup.State): + Output only. The current state of the backup. + service_revision (google.cloud.metastore_v1.types.Service): + Output only. The revision of the service at + the time of backup. + description (str): + The description of the backup. + restoring_services (Sequence[str]): + Output only. Services that are restoring from + the backup. + """ + + class State(proto.Enum): + r"""The current state of the backup.""" + STATE_UNSPECIFIED = 0 + CREATING = 1 + DELETING = 2 + ACTIVE = 3 + FAILED = 4 + RESTORING = 5 + + name = proto.Field(proto.STRING, number=1,) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=4, enum=State,) + service_revision = proto.Field(proto.MESSAGE, number=5, message="Service",) + description = proto.Field(proto.STRING, number=6,) + restoring_services = proto.RepeatedField(proto.STRING, number=7,) + + +class Restore(proto.Message): + r"""The details of a metadata restore operation. + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the restore + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the restore ended. + state (google.cloud.metastore_v1.types.Restore.State): + Output only. The current state of the + restore. + backup (str): + Output only. The relative resource name of the metastore + service backup to restore from, in the following form: + + ``projects/{project_id}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. + type_ (google.cloud.metastore_v1.types.Restore.RestoreType): + Output only. The type of restore. + details (str): + Output only. 
The restore details containing + the revision of the service to be restored to, + in format of JSON. + """ + + class State(proto.Enum): + r"""The current state of the restore.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLED = 4 + + class RestoreType(proto.Enum): + r"""The type of restore. If unspecified, defaults to ``METADATA_ONLY``.""" + RESTORE_TYPE_UNSPECIFIED = 0 + FULL = 1 + METADATA_ONLY = 2 + + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=3, enum=State,) + backup = proto.Field(proto.STRING, number=4,) + type_ = proto.Field(proto.ENUM, number=5, enum=RestoreType,) + details = proto.Field(proto.STRING, number=6,) + + class ListServicesRequest(proto.Message): r"""Request message for [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices]. @@ -741,6 +847,167 @@ class UpdateMetadataImportRequest(proto.Message): request_id = proto.Field(proto.STRING, number=3,) +class ListBackupsRequest(proto.Message): + r"""Request message for + [DataprocMetastore.ListBackups][google.cloud.metastore.v1.DataprocMetastore.ListBackups]. + + Attributes: + parent (str): + Required. The relative resource name of the service whose + backups to list, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups``. + page_size (int): + Optional. The maximum number of backups to + return. The response may contain less than the + maximum number. If unspecified, no more than 500 + backups are returned. The maximum value is 1000; + values above 1000 are changed to 1000. + page_token (str): + Optional. A page token, received from a previous + [DataprocMetastore.ListBackups][google.cloud.metastore.v1.DataprocMetastore.ListBackups] + call. Provide this token to retrieve the subsequent page. 
+ + To retrieve the first page, supply an empty page token. + + When paginating, other parameters provided to + [DataprocMetastore.ListBackups][google.cloud.metastore.v1.DataprocMetastore.ListBackups] + must match the call that provided the page token. + filter (str): + Optional. The filter to apply to list + results. + order_by (str): + Optional. Specify the ordering of results as described in + `Sorting + Order `__. + If not specified, the results will be sorted in the default + order. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) + + +class ListBackupsResponse(proto.Message): + r"""Response message for + [DataprocMetastore.ListBackups][google.cloud.metastore.v1.DataprocMetastore.ListBackups]. + + Attributes: + backups (Sequence[google.cloud.metastore_v1.types.Backup]): + The backups of the specified service. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (Sequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups = proto.RepeatedField(proto.MESSAGE, number=1, message="Backup",) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) + + +class GetBackupRequest(proto.Message): + r"""Request message for + [DataprocMetastore.GetBackup][google.cloud.metastore.v1.DataprocMetastore.GetBackup]. + + Attributes: + name (str): + Required. The relative resource name of the backup to + retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. 
+ """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreateBackupRequest(proto.Message): + r"""Request message for + [DataprocMetastore.CreateBackup][google.cloud.metastore.v1.DataprocMetastore.CreateBackup]. + + Attributes: + parent (str): + Required. The relative resource name of the service in which + to create a backup of the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + backup_id (str): + Required. The ID of the backup, which is used + as the final component of the backup's name. + + This value must be between 1 and 64 characters + long, begin with a letter, end with a letter or + number, and consist of alpha-numeric ASCII + characters or hyphens. + backup (google.cloud.metastore_v1.types.Backup): + Required. The backup to create. The ``name`` field is + ignored. The ID of the created backup must be provided in + the request's ``backup_id`` field. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments. + + The request ID must be a valid + `UUID `__ + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + """ + + parent = proto.Field(proto.STRING, number=1,) + backup_id = proto.Field(proto.STRING, number=2,) + backup = proto.Field(proto.MESSAGE, number=3, message="Backup",) + request_id = proto.Field(proto.STRING, number=4,) + + +class DeleteBackupRequest(proto.Message): + r"""Request message for + [DataprocMetastore.DeleteBackup][google.cloud.metastore.v1.DataprocMetastore.DeleteBackup]. + + Attributes: + name (str): + Required. 
The relative resource name of the backup to + delete, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments. + + The request ID must be a valid + `UUID `__ + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + """ + + name = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=2,) + + class ExportMetadataRequest(proto.Message): r"""Request message for [DataprocMetastore.ExportMetadata][google.cloud.metastore.v1.DataprocMetastore.ExportMetadata]. @@ -785,6 +1052,46 @@ class ExportMetadataRequest(proto.Message): ) +class RestoreServiceRequest(proto.Message): + r"""Request message for [DataprocMetastore.Restore][]. + Attributes: + service (str): + Required. The relative resource name of the metastore + service to run restore, in the following form: + + ``projects/{project_id}/locations/{location_id}/services/{service_id}``. + backup (str): + Required. The relative resource name of the metastore + service backup to restore from, in the following form: + + ``projects/{project_id}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. + restore_type (google.cloud.metastore_v1.types.Restore.RestoreType): + Optional. The type of restore. If unspecified, defaults to + ``METADATA_ONLY``. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. 
The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments. + + The request ID must be a valid + `UUID `__. + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + """ + + service = proto.Field(proto.STRING, number=1,) + backup = proto.Field(proto.STRING, number=2,) + restore_type = proto.Field(proto.ENUM, number=3, enum="Restore.RestoreType",) + request_id = proto.Field(proto.STRING, number=4,) + + class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. Attributes: @@ -864,6 +1171,7 @@ class Type(proto.Enum): r"""The type of the database dump.""" TYPE_UNSPECIFIED = 0 MYSQL = 1 + AVRO = 2 __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/scripts/fixup_metastore_v1_keywords.py b/scripts/fixup_metastore_v1_keywords.py index e09d167..2cdabbc 100644 --- a/scripts/fixup_metastore_v1_keywords.py +++ b/scripts/fixup_metastore_v1_keywords.py @@ -39,14 +39,19 @@ def partition( class metastoreCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', ), 'create_metadata_import': ('parent', 'metadata_import_id', 'metadata_import', 'request_id', ), 'create_service': ('parent', 'service_id', 'service', 'request_id', ), + 'delete_backup': ('name', 'request_id', ), 'delete_service': ('name', 'request_id', ), 'export_metadata': ('service', 'destination_gcs_folder', 'request_id', 'database_dump_type', ), + 'get_backup': ('name', ), 'get_metadata_import': ('name', ), 'get_service': ('name', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_metadata_imports': 
('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_services': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'restore_service': ('service', 'backup', 'restore_type', 'request_id', ), 'update_metadata_import': ('update_mask', 'metadata_import', 'request_id', ), 'update_service': ('update_mask', 'service', 'request_id', ), } diff --git a/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py b/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py index 970cccc..8ad7261 100644 --- a/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py +++ b/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -3049,6 +3049,1181 @@ async def test_export_metadata_field_headers_async(): assert ("x-goog-request-params", "service=service/value",) in kw["metadata"] +def test_restore_service( + transport: str = "grpc", request_type=metastore.RestoreServiceRequest +): + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RestoreServiceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_restore_service_from_dict(): + test_restore_service(request_type=dict) + + +def test_restore_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_service), "__call__") as call: + client.restore_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RestoreServiceRequest() + + +@pytest.mark.asyncio +async def test_restore_service_async( + transport: str = "grpc_asyncio", request_type=metastore.RestoreServiceRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.RestoreServiceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_service_async_from_dict(): + await test_restore_service_async(request_type=dict) + + +def test_restore_service_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.RestoreServiceRequest() + + request.service = "service/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_service), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "service=service/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_service_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.RestoreServiceRequest() + + request.service = "service/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_service), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "service=service/value",) in kw["metadata"] + + +def test_restore_service_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restore_service( + service="service_value", backup="backup_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].service == "service_value" + assert args[0].backup == "backup_value" + + +def test_restore_service_flattened_error(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_service( + metastore.RestoreServiceRequest(), + service="service_value", + backup="backup_value", + ) + + +@pytest.mark.asyncio +async def test_restore_service_flattened_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_service), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_service( + service="service_value", backup="backup_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].service == "service_value" + assert args[0].backup == "backup_value" + + +@pytest.mark.asyncio +async def test_restore_service_flattened_error_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_service( + metastore.RestoreServiceRequest(), + service="service_value", + backup="backup_value", + ) + + +def test_list_backups( + transport: str = "grpc", request_type=metastore.ListBackupsRequest +): + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListBackupsResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListBackupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_from_dict(): + test_list_backups(request_type=dict) + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=metastore.ListBackupsRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListBackupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListBackupsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = metastore.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListBackupsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_backups_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +def test_list_backups_flattened_error(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + metastore.ListBackupsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListBackupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backups( + metastore.ListBackupsRequest(), parent="parent_value", + ) + + +def test_list_backups_pager(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + metastore.ListBackupsResponse( + backups=[metastore.Backup(), metastore.Backup(), metastore.Backup(),], + next_page_token="abc", + ), + metastore.ListBackupsResponse(backups=[], next_page_token="def",), + metastore.ListBackupsResponse( + backups=[metastore.Backup(),], next_page_token="ghi", + ), + metastore.ListBackupsResponse( + backups=[metastore.Backup(), metastore.Backup(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, metastore.Backup) for i in results) + + +def test_list_backups_pages(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListBackupsResponse( + backups=[metastore.Backup(), metastore.Backup(), metastore.Backup(),], + next_page_token="abc", + ), + metastore.ListBackupsResponse(backups=[], next_page_token="def",), + metastore.ListBackupsResponse( + backups=[metastore.Backup(),], next_page_token="ghi", + ), + metastore.ListBackupsResponse( + backups=[metastore.Backup(), metastore.Backup(),], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListBackupsResponse( + backups=[metastore.Backup(), metastore.Backup(), metastore.Backup(),], + next_page_token="abc", + ), + metastore.ListBackupsResponse(backups=[], next_page_token="def",), + metastore.ListBackupsResponse( + backups=[metastore.Backup(),], next_page_token="ghi", + ), + metastore.ListBackupsResponse( + backups=[metastore.Backup(), metastore.Backup(),], + ), + RuntimeError, + ) + async_pager = await client.list_backups(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metastore.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + metastore.ListBackupsResponse( + backups=[metastore.Backup(), metastore.Backup(), metastore.Backup(),], + next_page_token="abc", + ), + metastore.ListBackupsResponse(backups=[], next_page_token="def",), + metastore.ListBackupsResponse( + backups=[metastore.Backup(),], next_page_token="ghi", + ), + metastore.ListBackupsResponse( + backups=[metastore.Backup(), metastore.Backup(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_backups(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_backup(transport: str = "grpc", request_type=metastore.GetBackupRequest): + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Backup( + name="name_value", + state=metastore.Backup.State.CREATING, + description="description_value", + restoring_services=["restoring_services_value"], + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetBackupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metastore.Backup) + assert response.name == "name_value" + assert response.state == metastore.Backup.State.CREATING + assert response.description == "description_value" + assert response.restoring_services == ["restoring_services_value"] + + +def test_get_backup_from_dict(): + test_get_backup(request_type=dict) + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=metastore.GetBackupRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Backup( + name="name_value", + state=metastore.Backup.State.CREATING, + description="description_value", + restoring_services=["restoring_services_value"], + ) + ) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Backup) + assert response.name == "name_value" + assert response.state == metastore.Backup.State.CREATING + assert response.description == "description_value" + assert response.restoring_services == ["restoring_services_value"] + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetBackupRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = metastore.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetBackupRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_backup_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_backup_flattened_error(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + metastore.GetBackupRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_flattened_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+        # NOTE(review): the awaitable return value is set via FakeUnaryUnaryCall below.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Backup())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_backup(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_backup(
+            metastore.GetBackupRequest(), name="name_value",
+        )
+
+
+def test_create_backup(
+    transport: str = "grpc", request_type=metastore.CreateBackupRequest
+):
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.CreateBackupRequest()
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future) + + +def test_create_backup_from_dict(): + test_create_backup(request_type=dict) + + +def test_create_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + client.create_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateBackupRequest() + + +@pytest.mark.asyncio +async def test_create_backup_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateBackupRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateBackupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) + + +def test_create_backup_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateBackupRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateBackupRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_backup_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup( + parent="parent_value", + backup=metastore.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].backup == metastore.Backup(name="name_value") + assert args[0].backup_id == "backup_id_value" + + +def test_create_backup_flattened_error(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup( + metastore.CreateBackupRequest(), + parent="parent_value", + backup=metastore.Backup(name="name_value"), + backup_id="backup_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_flattened_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+        # NOTE(review): the awaitable return value is set via FakeUnaryUnaryCall below.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_backup(
+            parent="parent_value",
+            backup=metastore.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+        assert args[0].backup == metastore.Backup(name="name_value")
+        assert args[0].backup_id == "backup_id_value"
+
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_backup(
+            metastore.CreateBackupRequest(),
+            parent="parent_value",
+            backup=metastore.Backup(name="name_value"),
+            backup_id="backup_id_value",
+        )
+
+
+def test_delete_backup(
+    transport: str = "grpc", request_type=metastore.DeleteBackupRequest
+):
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_backup), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_from_dict(): + test_delete_backup(request_type=dict) + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteBackupRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteBackupRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteBackupRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteBackupRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_backup_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_backup_flattened_error(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + metastore.DeleteBackupRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+        # NOTE(review): the awaitable return value is set via FakeUnaryUnaryCall below.
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_backup(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_backup(
+            metastore.DeleteBackupRequest(), name="name_value",
+        )
+
+
 def test_credentials_transport_error():
     # It is an error to provide credentials and a transport instance.
transport = transports.DataprocMetastoreGrpcTransport( @@ -3156,6 +4331,11 @@ def test_dataproc_metastore_base_transport(): "create_metadata_import", "update_metadata_import", "export_metadata", + "restore_service", + "list_backups", + "get_backup", + "create_backup", + "delete_backup", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3535,11 +4715,37 @@ def test_dataproc_metastore_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_metadata_import_path(): +def test_backup_path(): project = "squid" location = "clam" service = "whelk" - metadata_import = "octopus" + backup = "octopus" + expected = "projects/{project}/locations/{location}/services/{service}/backups/{backup}".format( + project=project, location=location, service=service, backup=backup, + ) + actual = DataprocMetastoreClient.backup_path(project, location, service, backup) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "service": "cuttlefish", + "backup": "mussel", + } + path = DataprocMetastoreClient.backup_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataprocMetastoreClient.parse_backup_path(path) + assert expected == actual + + +def test_metadata_import_path(): + project = "winkle" + location = "nautilus" + service = "scallop" + metadata_import = "abalone" expected = "projects/{project}/locations/{location}/services/{service}/metadataImports/{metadata_import}".format( project=project, location=location, @@ -3554,10 +4760,10 @@ def test_metadata_import_path(): def test_parse_metadata_import_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "service": "cuttlefish", - "metadata_import": "mussel", + "project": "squid", + "location": "clam", + "service": "whelk", + "metadata_import": "octopus", } path = DataprocMetastoreClient.metadata_import_path(**expected) @@ -3567,8 +4773,8 @@ def test_parse_metadata_import_path(): def test_network_path(): - project = "winkle" - network = "nautilus" + project = "oyster" + network = "nudibranch" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, ) @@ -3578,8 +4784,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "scallop", - "network": "abalone", + "project": "cuttlefish", + "network": "mussel", } path = DataprocMetastoreClient.network_path(**expected) @@ -3589,9 +4795,9 @@ def test_parse_network_path(): def test_service_path(): - project = "squid" - location = "clam" - service = "whelk" + project = "winkle" + location = "nautilus" + service = "scallop" expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, service=service, ) @@ -3601,9 +4807,9 @@ def test_service_path(): def test_parse_service_path(): expected = { - "project": "octopus", - "location": "oyster", - "service": "nudibranch", + "project": "abalone", + "location": "squid", + "service": "clam", } path = DataprocMetastoreClient.service_path(**expected) @@ -3613,7 +4819,7 @@ def test_parse_service_path(): def 
test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3623,7 +4829,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "octopus", } path = DataprocMetastoreClient.common_billing_account_path(**expected) @@ -3633,7 +4839,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "oyster" expected = "folders/{folder}".format(folder=folder,) actual = DataprocMetastoreClient.common_folder_path(folder) assert expected == actual @@ -3641,7 +4847,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "nudibranch", } path = DataprocMetastoreClient.common_folder_path(**expected) @@ -3651,7 +4857,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "cuttlefish" expected = "organizations/{organization}".format(organization=organization,) actual = DataprocMetastoreClient.common_organization_path(organization) assert expected == actual @@ -3659,7 +4865,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "mussel", } path = DataprocMetastoreClient.common_organization_path(**expected) @@ -3669,7 +4875,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "winkle" expected = "projects/{project}".format(project=project,) actual = DataprocMetastoreClient.common_project_path(project) assert expected == actual @@ -3677,7 +4883,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nautilus", } path = 
DataprocMetastoreClient.common_project_path(**expected) @@ -3687,8 +4893,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -3698,8 +4904,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "squid", + "location": "clam", } path = DataprocMetastoreClient.common_location_path(**expected)