From 4d054d92fed4296883e5ae09b99d57bd74d68fb4 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 2 Jun 2021 12:48:05 -0400 Subject: [PATCH] feat: add v1 (#28) --- .github/.OwlBot.yaml | 3 - docs/index.rst | 10 +- docs/metastore_v1/dataproc_metastore.rst | 10 + docs/metastore_v1/services.rst | 6 + docs/metastore_v1/types.rst | 7 + docs/metastore_v1alpha/dataproc_metastore.rst | 1 - docs/metastore_v1beta/dataproc_metastore.rst | 1 - google/cloud/metastore/__init__.py | 80 +- google/cloud/metastore/py.typed | 2 +- google/cloud/metastore_v1/__init__.py | 70 + google/cloud/metastore_v1/gapic_metadata.json | 123 + google/cloud/metastore_v1/py.typed | 2 + .../cloud/metastore_v1/services/__init__.py | 15 + .../services/dataproc_metastore/__init__.py | 22 + .../dataproc_metastore/async_client.py | 1104 +++++ .../services/dataproc_metastore/client.py | 1322 ++++++ .../services/dataproc_metastore/pagers.py | 283 ++ .../dataproc_metastore/transports/__init__.py | 33 + .../dataproc_metastore/transports/base.py | 308 ++ .../dataproc_metastore/transports/grpc.py | 528 +++ .../transports/grpc_asyncio.py | 552 +++ google/cloud/metastore_v1/types/__init__.py | 66 + google/cloud/metastore_v1/types/metastore.py | 869 ++++ google/cloud/metastore_v1alpha/__init__.py | 9 +- .../metastore_v1alpha/gapic_metadata.json | 173 + .../metastore_v1alpha/services/__init__.py | 1 - .../services/dataproc_metastore/__init__.py | 2 - .../dataproc_metastore/async_client.py | 68 +- .../services/dataproc_metastore/client.py | 131 +- .../services/dataproc_metastore/pagers.py | 8 +- .../dataproc_metastore/transports/__init__.py | 2 - .../dataproc_metastore/transports/base.py | 170 +- .../dataproc_metastore/transports/grpc.py | 60 +- .../transports/grpc_asyncio.py | 71 +- .../cloud/metastore_v1alpha/types/__init__.py | 2 - .../metastore_v1alpha/types/metastore.py | 300 +- google/cloud/metastore_v1beta/__init__.py | 9 +- .../metastore_v1beta/gapic_metadata.json | 173 + .../metastore_v1beta/services/__init__.py | 1 - .../services/dataproc_metastore/__init__.py | 2 - .../dataproc_metastore/async_client.py | 68 +- .../services/dataproc_metastore/client.py | 131 +- .../services/dataproc_metastore/pagers.py | 8 +- .../dataproc_metastore/transports/__init__.py | 2 - .../dataproc_metastore/transports/base.py | 170 +- .../dataproc_metastore/transports/grpc.py | 60 +- .../transports/grpc_asyncio.py | 71 +- .../cloud/metastore_v1beta/types/__init__.py | 2 - .../cloud/metastore_v1beta/types/metastore.py | 300 +- owlbot.py | 2 +- scripts/fixup_metastore_v1_keywords.py | 185 + scripts/fixup_metastore_v1alpha_keywords.py | 35 +- scripts/fixup_metastore_v1beta_keywords.py | 35 +- tests/__init__.py | 15 + tests/unit/__init__.py | 15 + tests/unit/gapic/__init__.py | 15 + tests/unit/gapic/metastore_v1/__init__.py | 15 + .../metastore_v1/test_dataproc_metastore.py | 3780 +++++++++++++++++ .../unit/gapic/metastore_v1alpha/__init__.py | 1 - .../test_dataproc_metastore.py | 867 ++-- tests/unit/gapic/metastore_v1beta/__init__.py | 1 - .../test_dataproc_metastore.py | 867 ++-- 62 files changed, 11350 insertions(+), 1894 deletions(-) create mode 100644 docs/metastore_v1/dataproc_metastore.rst create mode 100644 docs/metastore_v1/services.rst create mode 100644 docs/metastore_v1/types.rst create mode 100644 google/cloud/metastore_v1/__init__.py create mode 100644 google/cloud/metastore_v1/gapic_metadata.json create mode 100644 google/cloud/metastore_v1/py.typed create mode 100644 google/cloud/metastore_v1/services/__init__.py create mode 100644 
google/cloud/metastore_v1/services/dataproc_metastore/__init__.py create mode 100644 google/cloud/metastore_v1/services/dataproc_metastore/async_client.py create mode 100644 google/cloud/metastore_v1/services/dataproc_metastore/client.py create mode 100644 google/cloud/metastore_v1/services/dataproc_metastore/pagers.py create mode 100644 google/cloud/metastore_v1/services/dataproc_metastore/transports/__init__.py create mode 100644 google/cloud/metastore_v1/services/dataproc_metastore/transports/base.py create mode 100644 google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc.py create mode 100644 google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc_asyncio.py create mode 100644 google/cloud/metastore_v1/types/__init__.py create mode 100644 google/cloud/metastore_v1/types/metastore.py create mode 100644 google/cloud/metastore_v1alpha/gapic_metadata.json create mode 100644 google/cloud/metastore_v1beta/gapic_metadata.json create mode 100644 scripts/fixup_metastore_v1_keywords.py create mode 100644 tests/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/gapic/__init__.py create mode 100644 tests/unit/gapic/metastore_v1/__init__.py create mode 100644 tests/unit/gapic/metastore_v1/test_dataproc_metastore.py diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml index dc38b85..a43ec1a 100644 --- a/.github/.OwlBot.yaml +++ b/.github/.OwlBot.yaml @@ -18,9 +18,6 @@ docker: deep-remove-regex: - /owl-bot-staging -deep-preserve-regex: - - /owl-bot-staging/v1 - deep-copy-regex: - source: /google/cloud/metastore/(v.*)/.*-py/(.*) dest: /owl-bot-staging/$1/$2 diff --git a/docs/index.rst b/docs/index.rst index cc22fc9..7e2963f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -3,7 +3,15 @@ .. include:: multiprocessing.rst This package includes clients for multiple versions of the Dataproc Metastore -API. By default, you will get ``v1beta``, the latest version. +API. By default, you will get ``v1``, the latest version. + +v1 API Reference +-------------------- +.. toctree:: + :maxdepth: 2 + + metastore_v1/services + metastore_v1/types v1beta API Reference -------------------- diff --git a/docs/metastore_v1/dataproc_metastore.rst b/docs/metastore_v1/dataproc_metastore.rst new file mode 100644 index 0000000..8be84f5 --- /dev/null +++ b/docs/metastore_v1/dataproc_metastore.rst @@ -0,0 +1,10 @@ +DataprocMetastore +----------------------------------- + +.. automodule:: google.cloud.metastore_v1.services.dataproc_metastore + :members: + :inherited-members: + +.. automodule:: google.cloud.metastore_v1.services.dataproc_metastore.pagers + :members: + :inherited-members: diff --git a/docs/metastore_v1/services.rst b/docs/metastore_v1/services.rst new file mode 100644 index 0000000..a3f1003 --- /dev/null +++ b/docs/metastore_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Metastore v1 API +========================================== +.. toctree:: + :maxdepth: 2 + + dataproc_metastore diff --git a/docs/metastore_v1/types.rst b/docs/metastore_v1/types.rst new file mode 100644 index 0000000..48f7b8e --- /dev/null +++ b/docs/metastore_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Metastore v1 API +======================================= + +.. 
automodule:: google.cloud.metastore_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/metastore_v1alpha/dataproc_metastore.rst b/docs/metastore_v1alpha/dataproc_metastore.rst index eb8e51f..9806d0e 100644 --- a/docs/metastore_v1alpha/dataproc_metastore.rst +++ b/docs/metastore_v1alpha/dataproc_metastore.rst @@ -5,7 +5,6 @@ DataprocMetastore :members: :inherited-members: - .. automodule:: google.cloud.metastore_v1alpha.services.dataproc_metastore.pagers :members: :inherited-members: diff --git a/docs/metastore_v1beta/dataproc_metastore.rst b/docs/metastore_v1beta/dataproc_metastore.rst index 0efeec7..04c8322 100644 --- a/docs/metastore_v1beta/dataproc_metastore.rst +++ b/docs/metastore_v1beta/dataproc_metastore.rst @@ -5,7 +5,6 @@ DataprocMetastore :members: :inherited-members: - .. automodule:: google.cloud.metastore_v1beta.services.dataproc_metastore.pagers :members: :inherited-members: diff --git a/google/cloud/metastore/__init__.py b/google/cloud/metastore/__init__.py index 468560c..79d5194 100644 --- a/google/cloud/metastore/__init__.py +++ b/google/cloud/metastore/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,65 +14,49 @@ # limitations under the License. # -from google.cloud.metastore_v1beta.services.dataproc_metastore.async_client import ( - DataprocMetastoreAsyncClient, -) -from google.cloud.metastore_v1beta.services.dataproc_metastore.client import ( +from google.cloud.metastore_v1.services.dataproc_metastore.client import ( DataprocMetastoreClient, ) -from google.cloud.metastore_v1beta.types.metastore import Backup -from google.cloud.metastore_v1beta.types.metastore import CreateBackupRequest -from google.cloud.metastore_v1beta.types.metastore import CreateMetadataImportRequest -from google.cloud.metastore_v1beta.types.metastore import CreateServiceRequest -from google.cloud.metastore_v1beta.types.metastore import DataCatalogConfig -from google.cloud.metastore_v1beta.types.metastore import DatabaseDumpSpec -from google.cloud.metastore_v1beta.types.metastore import DeleteBackupRequest -from google.cloud.metastore_v1beta.types.metastore import DeleteServiceRequest -from google.cloud.metastore_v1beta.types.metastore import ExportMetadataRequest -from google.cloud.metastore_v1beta.types.metastore import GetBackupRequest -from google.cloud.metastore_v1beta.types.metastore import GetMetadataImportRequest -from google.cloud.metastore_v1beta.types.metastore import GetServiceRequest -from google.cloud.metastore_v1beta.types.metastore import HiveMetastoreConfig -from google.cloud.metastore_v1beta.types.metastore import KerberosConfig -from google.cloud.metastore_v1beta.types.metastore import ListBackupsRequest -from google.cloud.metastore_v1beta.types.metastore import ListBackupsResponse -from google.cloud.metastore_v1beta.types.metastore import ListMetadataImportsRequest -from google.cloud.metastore_v1beta.types.metastore import ListMetadataImportsResponse -from google.cloud.metastore_v1beta.types.metastore import ListServicesRequest -from google.cloud.metastore_v1beta.types.metastore import ListServicesResponse -from google.cloud.metastore_v1beta.types.metastore import LocationMetadata -from google.cloud.metastore_v1beta.types.metastore import MaintenanceWindow -from google.cloud.metastore_v1beta.types.metastore import MetadataExport -from google.cloud.metastore_v1beta.types.metastore import MetadataImport -from 
google.cloud.metastore_v1beta.types.metastore import MetadataIntegration -from google.cloud.metastore_v1beta.types.metastore import MetadataManagementActivity -from google.cloud.metastore_v1beta.types.metastore import OperationMetadata -from google.cloud.metastore_v1beta.types.metastore import Restore -from google.cloud.metastore_v1beta.types.metastore import RestoreServiceRequest -from google.cloud.metastore_v1beta.types.metastore import Secret -from google.cloud.metastore_v1beta.types.metastore import Service -from google.cloud.metastore_v1beta.types.metastore import UpdateMetadataImportRequest -from google.cloud.metastore_v1beta.types.metastore import UpdateServiceRequest +from google.cloud.metastore_v1.services.dataproc_metastore.async_client import ( + DataprocMetastoreAsyncClient, +) + +from google.cloud.metastore_v1.types.metastore import CreateMetadataImportRequest +from google.cloud.metastore_v1.types.metastore import CreateServiceRequest +from google.cloud.metastore_v1.types.metastore import DatabaseDumpSpec +from google.cloud.metastore_v1.types.metastore import DeleteServiceRequest +from google.cloud.metastore_v1.types.metastore import ExportMetadataRequest +from google.cloud.metastore_v1.types.metastore import GetMetadataImportRequest +from google.cloud.metastore_v1.types.metastore import GetServiceRequest +from google.cloud.metastore_v1.types.metastore import HiveMetastoreConfig +from google.cloud.metastore_v1.types.metastore import KerberosConfig +from google.cloud.metastore_v1.types.metastore import ListMetadataImportsRequest +from google.cloud.metastore_v1.types.metastore import ListMetadataImportsResponse +from google.cloud.metastore_v1.types.metastore import ListServicesRequest +from google.cloud.metastore_v1.types.metastore import ListServicesResponse +from google.cloud.metastore_v1.types.metastore import LocationMetadata +from google.cloud.metastore_v1.types.metastore import MaintenanceWindow +from google.cloud.metastore_v1.types.metastore import MetadataExport +from google.cloud.metastore_v1.types.metastore import MetadataImport +from google.cloud.metastore_v1.types.metastore import MetadataManagementActivity +from google.cloud.metastore_v1.types.metastore import OperationMetadata +from google.cloud.metastore_v1.types.metastore import Secret +from google.cloud.metastore_v1.types.metastore import Service +from google.cloud.metastore_v1.types.metastore import UpdateMetadataImportRequest +from google.cloud.metastore_v1.types.metastore import UpdateServiceRequest __all__ = ( - "Backup", - "CreateBackupRequest", + "DataprocMetastoreClient", + "DataprocMetastoreAsyncClient", "CreateMetadataImportRequest", "CreateServiceRequest", - "DataCatalogConfig", "DatabaseDumpSpec", - "DataprocMetastoreAsyncClient", - "DataprocMetastoreClient", - "DeleteBackupRequest", "DeleteServiceRequest", "ExportMetadataRequest", - "GetBackupRequest", "GetMetadataImportRequest", "GetServiceRequest", "HiveMetastoreConfig", "KerberosConfig", - "ListBackupsRequest", - "ListBackupsResponse", "ListMetadataImportsRequest", "ListMetadataImportsResponse", "ListServicesRequest", @@ -82,11 +65,8 @@ "MaintenanceWindow", "MetadataExport", "MetadataImport", - "MetadataIntegration", "MetadataManagementActivity", "OperationMetadata", - "Restore", - "RestoreServiceRequest", "Secret", "Service", "UpdateMetadataImportRequest", diff --git a/google/cloud/metastore/py.typed b/google/cloud/metastore/py.typed index f949c43..72edca0 100644 --- a/google/cloud/metastore/py.typed +++ b/google/cloud/metastore/py.typed @@ -1,2 
+1,2 @@ # Marker file for PEP 561. -# The google-cloud-dataproc-metastore package uses inline types. +# The google-cloud-metastore package uses inline types. diff --git a/google/cloud/metastore_v1/__init__.py b/google/cloud/metastore_v1/__init__.py new file mode 100644 index 0000000..4410753 --- /dev/null +++ b/google/cloud/metastore_v1/__init__.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.dataproc_metastore import DataprocMetastoreClient +from .services.dataproc_metastore import DataprocMetastoreAsyncClient + +from .types.metastore import CreateMetadataImportRequest +from .types.metastore import CreateServiceRequest +from .types.metastore import DatabaseDumpSpec +from .types.metastore import DeleteServiceRequest +from .types.metastore import ExportMetadataRequest +from .types.metastore import GetMetadataImportRequest +from .types.metastore import GetServiceRequest +from .types.metastore import HiveMetastoreConfig +from .types.metastore import KerberosConfig +from .types.metastore import ListMetadataImportsRequest +from .types.metastore import ListMetadataImportsResponse +from .types.metastore import ListServicesRequest +from .types.metastore import ListServicesResponse +from .types.metastore import LocationMetadata +from .types.metastore import MaintenanceWindow +from .types.metastore import MetadataExport +from .types.metastore import MetadataImport +from .types.metastore import MetadataManagementActivity +from .types.metastore import OperationMetadata +from .types.metastore import Secret +from .types.metastore import Service +from .types.metastore import UpdateMetadataImportRequest +from .types.metastore import UpdateServiceRequest + +__all__ = ( + "DataprocMetastoreAsyncClient", + "CreateMetadataImportRequest", + "CreateServiceRequest", + "DatabaseDumpSpec", + "DataprocMetastoreClient", + "DeleteServiceRequest", + "ExportMetadataRequest", + "GetMetadataImportRequest", + "GetServiceRequest", + "HiveMetastoreConfig", + "KerberosConfig", + "ListMetadataImportsRequest", + "ListMetadataImportsResponse", + "ListServicesRequest", + "ListServicesResponse", + "LocationMetadata", + "MaintenanceWindow", + "MetadataExport", + "MetadataImport", + "MetadataManagementActivity", + "OperationMetadata", + "Secret", + "Service", + "UpdateMetadataImportRequest", + "UpdateServiceRequest", +) diff --git a/google/cloud/metastore_v1/gapic_metadata.json b/google/cloud/metastore_v1/gapic_metadata.json new file mode 100644 index 0000000..dd2f060 --- /dev/null +++ b/google/cloud/metastore_v1/gapic_metadata.json @@ -0,0 +1,123 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.metastore_v1", + "protoPackage": "google.cloud.metastore.v1", + "schema": "1.0", + "services": { + "DataprocMetastore": { + "clients": { + "grpc": { + "libraryClient": "DataprocMetastoreClient", + "rpcs": { + "CreateMetadataImport": { + 
"methods": [ + "create_metadata_import" + ] + }, + "CreateService": { + "methods": [ + "create_service" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "ExportMetadata": { + "methods": [ + "export_metadata" + ] + }, + "GetMetadataImport": { + "methods": [ + "get_metadata_import" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "ListMetadataImports": { + "methods": [ + "list_metadata_imports" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "UpdateMetadataImport": { + "methods": [ + "update_metadata_import" + ] + }, + "UpdateService": { + "methods": [ + "update_service" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataprocMetastoreAsyncClient", + "rpcs": { + "CreateMetadataImport": { + "methods": [ + "create_metadata_import" + ] + }, + "CreateService": { + "methods": [ + "create_service" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "ExportMetadata": { + "methods": [ + "export_metadata" + ] + }, + "GetMetadataImport": { + "methods": [ + "get_metadata_import" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "ListMetadataImports": { + "methods": [ + "list_metadata_imports" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "UpdateMetadataImport": { + "methods": [ + "update_metadata_import" + ] + }, + "UpdateService": { + "methods": [ + "update_service" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/metastore_v1/py.typed b/google/cloud/metastore_v1/py.typed new file mode 100644 index 0000000..72edca0 --- /dev/null +++ b/google/cloud/metastore_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-metastore package uses inline types. diff --git a/google/cloud/metastore_v1/services/__init__.py b/google/cloud/metastore_v1/services/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/google/cloud/metastore_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/__init__.py b/google/cloud/metastore_v1/services/dataproc_metastore/__init__.py new file mode 100644 index 0000000..194ae14 --- /dev/null +++ b/google/cloud/metastore_v1/services/dataproc_metastore/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import DataprocMetastoreClient +from .async_client import DataprocMetastoreAsyncClient + +__all__ = ( + "DataprocMetastoreClient", + "DataprocMetastoreAsyncClient", +) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py b/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py new file mode 100644 index 0000000..2deed26 --- /dev/null +++ b/google/cloud/metastore_v1/services/dataproc_metastore/async_client.py @@ -0,0 +1,1104 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.metastore_v1.services.dataproc_metastore import pagers +from google.cloud.metastore_v1.types import metastore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataprocMetastoreGrpcAsyncIOTransport +from .client import DataprocMetastoreClient + + +class DataprocMetastoreAsyncClient: + """Configures and manages metastore services. Metastore services are + fully managed, highly available, auto-scaled, auto-healing, + OSS-native deployments of technical metadata management software. + Each metastore service exposes a network endpoint through which + metadata queries are served. Metadata queries can originate from a + variety of sources, including Apache Hive, Apache Presto, and Apache + Spark. + + The Dataproc Metastore API defines the following resource model: + + - The service works with a collection of Google Cloud projects, + named: ``/projects/*`` + + - Each project has a collection of available locations, named: + ``/locations/*`` (a location must refer to a Google Cloud + ``region``) + + - Each location has a collection of services, named: + ``/services/*`` + + - Dataproc Metastore services are resources with names of the form: + + ``/projects/{project_number}/locations/{location_id}/services/{service_id}``. 
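    A minimal usage sketch (illustrative only; the project number and
    location are placeholders, and default application credentials are
    assumed)::

        import asyncio

        from google.cloud import metastore_v1

        async def main():
            client = metastore_v1.DataprocMetastoreAsyncClient()
            # Page through every service in the placeholder location.
            async for service in await client.list_services(
                parent="projects/123/locations/us-central1"
            ):
                print(service.name)

        asyncio.run(main())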
+ """ + + _client: DataprocMetastoreClient + + DEFAULT_ENDPOINT = DataprocMetastoreClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataprocMetastoreClient.DEFAULT_MTLS_ENDPOINT + + metadata_import_path = staticmethod(DataprocMetastoreClient.metadata_import_path) + parse_metadata_import_path = staticmethod( + DataprocMetastoreClient.parse_metadata_import_path + ) + network_path = staticmethod(DataprocMetastoreClient.network_path) + parse_network_path = staticmethod(DataprocMetastoreClient.parse_network_path) + service_path = staticmethod(DataprocMetastoreClient.service_path) + parse_service_path = staticmethod(DataprocMetastoreClient.parse_service_path) + common_billing_account_path = staticmethod( + DataprocMetastoreClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DataprocMetastoreClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DataprocMetastoreClient.common_folder_path) + parse_common_folder_path = staticmethod( + DataprocMetastoreClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DataprocMetastoreClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DataprocMetastoreClient.parse_common_organization_path + ) + common_project_path = staticmethod(DataprocMetastoreClient.common_project_path) + parse_common_project_path = staticmethod( + DataprocMetastoreClient.parse_common_project_path + ) + common_location_path = staticmethod(DataprocMetastoreClient.common_location_path) + parse_common_location_path = staticmethod( + DataprocMetastoreClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataprocMetastoreAsyncClient: The constructed client. + """ + return DataprocMetastoreClient.from_service_account_info.__func__(DataprocMetastoreAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataprocMetastoreAsyncClient: The constructed client. + """ + return DataprocMetastoreClient.from_service_account_file.__func__(DataprocMetastoreAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataprocMetastoreTransport: + """Returns the transport used by the client instance. + + Returns: + DataprocMetastoreTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(DataprocMetastoreClient).get_transport_class, type(DataprocMetastoreClient) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, DataprocMetastoreTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dataproc metastore client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataprocMetastoreTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. The GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if a client certificate is present; this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation fails for any reason. + """ + self._client = DataprocMetastoreClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_services( + self, + request: metastore.ListServicesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServicesAsyncPager: + r"""Lists services in a project and location. + + Args: + request (:class:`google.cloud.metastore_v1.types.ListServicesRequest`): + The request object. Request message for + [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices]. + parent (:class:`str`): + Required. The relative resource name of the location of + metastore services to list, in the following form: + + ``projects/{project_number}/locations/{location_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.services.dataproc_metastore.pagers.ListServicesAsyncPager: + Response message for + [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set."
+ ) + + request = metastore.ListServicesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_services, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListServicesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_service( + self, + request: metastore.GetServiceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Service: + r"""Gets the details of a single service. + + Args: + request (:class:`google.cloud.metastore_v1.types.GetServiceRequest`): + The request object. Request message for + [DataprocMetastore.GetService][google.cloud.metastore.v1.DataprocMetastore.GetService]. + name (:class:`str`): + Required. The relative resource name of the metastore + service to retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.types.Service: + A managed metastore service that + serves metadata queries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_service, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
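        # Unlike the mutating RPCs below, get_service returns the
        # metastore.Service message directly (no long-running operation),
        # so fields such as response.name can be read immediately.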
+ return response + + async def create_service( + self, + request: metastore.CreateServiceRequest = None, + *, + parent: str = None, + service: metastore.Service = None, + service_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a metastore service in a project and + location. + + Args: + request (:class:`google.cloud.metastore_v1.types.CreateServiceRequest`): + The request object. Request message for + [DataprocMetastore.CreateService][google.cloud.metastore.v1.DataprocMetastore.CreateService]. + parent (:class:`str`): + Required. The relative resource name of the location in + which to create a metastore service, in the following + form: + + ``projects/{project_number}/locations/{location_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service (:class:`google.cloud.metastore_v1.types.Service`): + Required. The Metastore service to create. The ``name`` + field is ignored. The ID of the created metastore + service must be provided in the request's ``service_id`` + field. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_id (:class:`str`): + Required. The ID of the metastore + service, which is used as the final + component of the metastore service's + name. + This value must be between 2 and 63 + characters long inclusive, begin with a + letter, end with a letter or number, and + consist of alpha-numeric ASCII + characters or hyphens. + + This corresponds to the ``service_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.Service` A + managed metastore service that serves metadata queries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, service, service_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CreateServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if service is not None: + request.service = service + if service_id is not None: + request.service_id = service_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_service, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + metastore.Service, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_service( + self, + request: metastore.UpdateServiceRequest = None, + *, + service: metastore.Service = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single service. + + Args: + request (:class:`google.cloud.metastore_v1.types.UpdateServiceRequest`): + The request object. Request message for + [DataprocMetastore.UpdateService][google.cloud.metastore.v1.DataprocMetastore.UpdateService]. + service (:class:`google.cloud.metastore_v1.types.Service`): + Required. The metastore service to update. The server + only merges fields in the service if they are specified + in ``update_mask``. + + The metastore service's ``name`` field is used to + identify the metastore service to be updated. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A field mask used to specify the fields to be + overwritten in the metastore service resource by the + update. Fields specified in the ``update_mask`` are + relative to the resource (not to the full request). A + field is overwritten if it is in the mask. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.Service` A + managed metastore service that serves metadata queries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.UpdateServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if service is not None: + request.service = service + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_service, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("service.name", request.service.name),) + ), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + metastore.Service, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_service( + self, + request: metastore.DeleteServiceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single service. + + Args: + request (:class:`google.cloud.metastore_v1.types.DeleteServiceRequest`): + The request object. Request message for + [DataprocMetastore.DeleteService][google.cloud.metastore.v1.DataprocMetastore.DeleteService]. + name (:class:`str`): + Required. The relative resource name of the metastore + service to delete, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.DeleteServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_service, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. 
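        # The wrapped future resolves to empty_pb2.Empty once the deletion
        # finishes, with progress surfaced as metastore.OperationMetadata;
        # awaiting response.result() blocks until the service is gone.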
+ return response + + async def list_metadata_imports( + self, + request: metastore.ListMetadataImportsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetadataImportsAsyncPager: + r"""Lists imports in a service. + + Args: + request (:class:`google.cloud.metastore_v1.types.ListMetadataImportsRequest`): + The request object. Request message for + [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1.DataprocMetastore.ListMetadataImports]. + parent (:class:`str`): + Required. The relative resource name of the service + whose metadata imports to list, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.services.dataproc_metastore.pagers.ListMetadataImportsAsyncPager: + Response message for + [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1.DataprocMetastore.ListMetadataImports]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.ListMetadataImportsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_metadata_imports, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMetadataImportsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_metadata_import( + self, + request: metastore.GetMetadataImportRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.MetadataImport: + r"""Gets details of a single import. + + Args: + request (:class:`google.cloud.metastore_v1.types.GetMetadataImportRequest`): + The request object. Request message for + [DataprocMetastore.GetMetadataImport][google.cloud.metastore.v1.DataprocMetastore.GetMetadataImport]. 
+ name (:class:`str`): + Required. The relative resource name of the metadata + import to retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports/{import_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.types.MetadataImport: + A metastore resource that imports + metadata. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.GetMetadataImportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_metadata_import, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_metadata_import( + self, + request: metastore.CreateMetadataImportRequest = None, + *, + parent: str = None, + metadata_import: metastore.MetadataImport = None, + metadata_import_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new MetadataImport in a given project and + location. + + Args: + request (:class:`google.cloud.metastore_v1.types.CreateMetadataImportRequest`): + The request object. Request message for + [DataprocMetastore.CreateMetadataImport][google.cloud.metastore.v1.DataprocMetastore.CreateMetadataImport]. + parent (:class:`str`): + Required. The relative resource name of the service in + which to create a metastore import, in the following + form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_import (:class:`google.cloud.metastore_v1.types.MetadataImport`): + Required. The metadata import to create. The ``name`` + field is ignored. The ID of the created metadata import + must be provided in the request's ``metadata_import_id`` + field. + + This corresponds to the ``metadata_import`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_import_id (:class:`str`): + Required. The ID of the metadata + import, which is used as the final + component of the metadata import's name. 
+ This value must be between 1 and 64 + characters long, begin with a letter, + end with a letter or number, and consist + of alpha-numeric ASCII characters or + hyphens. + + This corresponds to the ``metadata_import_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.MetadataImport` + A metastore resource that imports metadata. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metadata_import, metadata_import_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.CreateMetadataImportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_import is not None: + request.metadata_import = metadata_import + if metadata_import_id is not None: + request.metadata_import_id = metadata_import_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_metadata_import, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + metastore.MetadataImport, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_metadata_import( + self, + request: metastore.UpdateMetadataImportRequest = None, + *, + metadata_import: metastore.MetadataImport = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a single import. + Only the description field of MetadataImport may + be updated. + + Args: + request (:class:`google.cloud.metastore_v1.types.UpdateMetadataImportRequest`): + The request object. Request message for + [DataprocMetastore.UpdateMetadataImport][google.cloud.metastore.v1.DataprocMetastore.UpdateMetadataImport]. + metadata_import (:class:`google.cloud.metastore_v1.types.MetadataImport`): + Required. The metadata import to update. The server only + merges fields in the import if they are specified in + ``update_mask``. + + The metadata import's ``name`` field is used to identify + the metadata import to be updated.
+ + This corresponds to the ``metadata_import`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A field mask used to specify the fields to be + overwritten in the metadata import resource by the + update. Fields specified in the ``update_mask`` are + relative to the resource (not to the full request). A + field is overwritten if it is in the mask. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.MetadataImport` + A metastore resource that imports metadata. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metadata_import, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metastore.UpdateMetadataImportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metadata_import is not None: + request.metadata_import = metadata_import + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_metadata_import, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("metadata_import.name", request.metadata_import.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + metastore.MetadataImport, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + async def export_metadata( + self, + request: metastore.ExportMetadataRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports metadata from a service. + + Args: + request (:class:`google.cloud.metastore_v1.types.ExportMetadataRequest`): + The request object. Request message for + [DataprocMetastore.ExportMetadata][google.cloud.metastore.v1.DataprocMetastore.ExportMetadata]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.MetadataExport` + The details of a metadata export operation. + + """ + # Create or coerce a protobuf request object. + request = metastore.ExportMetadataRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_metadata, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("service", request.service),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + metastore.MetadataExport, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-metastore",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DataprocMetastoreAsyncClient",) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/client.py b/google/cloud/metastore_v1/services/dataproc_metastore/client.py new file mode 100644 index 0000000..78ad82b --- /dev/null +++ b/google/cloud/metastore_v1/services/dataproc_metastore/client.py @@ -0,0 +1,1322 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.metastore_v1.services.dataproc_metastore import pagers +from google.cloud.metastore_v1.types import metastore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataprocMetastoreGrpcTransport +from .transports.grpc_asyncio import DataprocMetastoreGrpcAsyncIOTransport + + +class DataprocMetastoreClientMeta(type): + """Metaclass for the DataprocMetastore client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DataprocMetastoreTransport]] + _transport_registry["grpc"] = DataprocMetastoreGrpcTransport + _transport_registry["grpc_asyncio"] = DataprocMetastoreGrpcAsyncIOTransport + + def get_transport_class( + cls, label: str = None, + ) -> Type[DataprocMetastoreTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataprocMetastoreClient(metaclass=DataprocMetastoreClientMeta): + """Configures and manages metastore services. Metastore services are + fully managed, highly available, auto-scaled, auto-healing, + OSS-native deployments of technical metadata management software. + Each metastore service exposes a network endpoint through which + metadata queries are served. Metadata queries can originate from a + variety of sources, including Apache Hive, Apache Presto, and Apache + Spark. + + The Dataproc Metastore API defines the following resource model: + + - The service works with a collection of Google Cloud projects, + named: ``/projects/*`` + + - Each project has a collection of available locations, named: + ``/locations/*`` (a location must refer to a Google Cloud + ``region``) + + - Each location has a collection of services, named: + ``/services/*`` + + - Dataproc Metastore services are resources with names of the form: + + ``/projects/{project_number}/locations/{location_id}/services/{service_id}``. 
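+
+    A minimal, illustrative usage sketch (the project and location below are
+    placeholders, and application default credentials are assumed)::
+
+        from google.cloud import metastore_v1
+
+        client = metastore_v1.DataprocMetastoreClient()
+        parent = "projects/my-project/locations/us-central1"
+        for service in client.list_services(parent=parent):
+            print(service.name)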
+ """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "metastore.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataprocMetastoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataprocMetastoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataprocMetastoreTransport: + """Returns the transport used by the client instance. + + Returns: + DataprocMetastoreTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def metadata_import_path( + project: str, location: str, service: str, metadata_import: str, + ) -> str: + """Returns a fully-qualified metadata_import string.""" + return "projects/{project}/locations/{location}/services/{service}/metadataImports/{metadata_import}".format( + project=project, + location=location, + service=service, + metadata_import=metadata_import, + ) + + @staticmethod + def parse_metadata_import_path(path: str) -> Dict[str, str]: + """Parses a metadata_import path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)/metadataImports/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path(project: str, network: str,) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, network=network, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def service_path(project: str, location: str, service: str,) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/services/{service}".format( + project=project, location=location, service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return 
"projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DataprocMetastoreTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dataproc metastore client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DataprocMetastoreTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DataprocMetastoreTransport): + # transport is a DataprocMetastoreTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_services( + self, + request: metastore.ListServicesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListServicesPager: + r"""Lists services in a project and location. + + Args: + request (google.cloud.metastore_v1.types.ListServicesRequest): + The request object. Request message for + [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices]. + parent (str): + Required. The relative resource name of the location of + metastore services to list, in the following form: + + ``projects/{project_number}/locations/{location_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.services.dataproc_metastore.pagers.ListServicesPager: + Response message for + [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListServicesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListServicesRequest): + request = metastore.ListServicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_services] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListServicesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_service( + self, + request: metastore.GetServiceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.Service: + r"""Gets the details of a single service. + + Args: + request (google.cloud.metastore_v1.types.GetServiceRequest): + The request object. Request message for + [DataprocMetastore.GetService][google.cloud.metastore.v1.DataprocMetastore.GetService]. + name (str): + Required. The relative resource name of the metastore + service to retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.types.Service: + A managed metastore service that + serves metadata queries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetServiceRequest): + request = metastore.GetServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
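+        # The per-call retry and timeout arguments override the defaults that
+        # were attached to the wrapped method when the transport was created.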
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_service( + self, + request: metastore.CreateServiceRequest = None, + *, + parent: str = None, + service: metastore.Service = None, + service_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a metastore service in a project and + location. + + Args: + request (google.cloud.metastore_v1.types.CreateServiceRequest): + The request object. Request message for + [DataprocMetastore.CreateService][google.cloud.metastore.v1.DataprocMetastore.CreateService]. + parent (str): + Required. The relative resource name of the location in + which to create a metastore service, in the following + form: + + ``projects/{project_number}/locations/{location_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service (google.cloud.metastore_v1.types.Service): + Required. The Metastore service to create. The ``name`` + field is ignored. The ID of the created metastore + service must be provided in the request's ``service_id`` + field. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + service_id (str): + Required. The ID of the metastore + service, which is used as the final + component of the metastore service's + name. + This value must be between 2 and 63 + characters long inclusive, begin with a + letter, end with a letter or number, and + consist of alpha-numeric ASCII + characters or hyphens. + + This corresponds to the ``service_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.Service` A + managed metastore service that serves metadata queries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, service, service_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateServiceRequest): + request = metastore.CreateServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if service is not None: + request.service = service + if service_id is not None: + request.service_id = service_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
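+        # The wrapped method is looked up from the transport's registry of
+        # methods that already carry their default timeout settings.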
+ rpc = self._transport._wrapped_methods[self._transport.create_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + metastore.Service, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_service( + self, + request: metastore.UpdateServiceRequest = None, + *, + service: metastore.Service = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single service. + + Args: + request (google.cloud.metastore_v1.types.UpdateServiceRequest): + The request object. Request message for + [DataprocMetastore.UpdateService][google.cloud.metastore.v1.DataprocMetastore.UpdateService]. + service (google.cloud.metastore_v1.types.Service): + Required. The metastore service to update. The server + only merges fields in the service if they are specified + in ``update_mask``. + + The metastore service's ``name`` field is used to + identify the metastore service to be updated. + + This corresponds to the ``service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A field mask used to specify the fields to be + overwritten in the metastore service resource by the + update. Fields specified in the ``update_mask`` are + relative to the resource (not to the full request). A + field is overwritten if it is in the mask. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.Service` A + managed metastore service that serves metadata queries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([service, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.UpdateServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.UpdateServiceRequest): + request = metastore.UpdateServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
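+        # The sanity check above guarantees these flattened values never
+        # overwrite fields on a caller-supplied request object.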
+ if service is not None: + request.service = service + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("service.name", request.service.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + metastore.Service, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_service( + self, + request: metastore.DeleteServiceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single service. + + Args: + request (google.cloud.metastore_v1.types.DeleteServiceRequest): + The request object. Request message for + [DataprocMetastore.DeleteService][google.cloud.metastore.v1.DataprocMetastore.DeleteService]. + name (str): + Required. The relative resource name of the metastore + service to delete, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.DeleteServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.DeleteServiceRequest): + request = metastore.DeleteServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_metadata_imports( + self, + request: metastore.ListMetadataImportsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetadataImportsPager: + r"""Lists imports in a service. + + Args: + request (google.cloud.metastore_v1.types.ListMetadataImportsRequest): + The request object. Request message for + [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1.DataprocMetastore.ListMetadataImports]. + parent (str): + Required. The relative resource name of the service + whose metadata imports to list, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.services.dataproc_metastore.pagers.ListMetadataImportsPager: + Response message for + [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1.DataprocMetastore.ListMetadataImports]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ListMetadataImportsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ListMetadataImportsRequest): + request = metastore.ListMetadataImportsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_metadata_imports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
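+        # The call below returns a single page; it is wrapped in a pager next,
+        # so iterating the result transparently fetches any remaining pages.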
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMetadataImportsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_metadata_import( + self, + request: metastore.GetMetadataImportRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metastore.MetadataImport: + r"""Gets details of a single import. + + Args: + request (google.cloud.metastore_v1.types.GetMetadataImportRequest): + The request object. Request message for + [DataprocMetastore.GetMetadataImport][google.cloud.metastore.v1.DataprocMetastore.GetMetadataImport]. + name (str): + Required. The relative resource name of the metadata + import to retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports/{import_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.metastore_v1.types.MetadataImport: + A metastore resource that imports + metadata. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.GetMetadataImportRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.GetMetadataImportRequest): + request = metastore.GetMetadataImportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_metadata_import] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_metadata_import( + self, + request: metastore.CreateMetadataImportRequest = None, + *, + parent: str = None, + metadata_import: metastore.MetadataImport = None, + metadata_import_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new MetadataImport in a given project and + location. + + Args: + request (google.cloud.metastore_v1.types.CreateMetadataImportRequest): + The request object. 
Request message for + [DataprocMetastore.CreateMetadataImport][google.cloud.metastore.v1.DataprocMetastore.CreateMetadataImport]. + parent (str): + Required. The relative resource name of the service in + which to create a metastore import, in the following + form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_import (google.cloud.metastore_v1.types.MetadataImport): + Required. The metadata import to create. The ``name`` + field is ignored. The ID of the created metadata import + must be provided in the request's ``metadata_import_id`` + field. + + This corresponds to the ``metadata_import`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_import_id (str): + Required. The ID of the metadata + import, which is used as the final + component of the metadata import's name. + This value must be between 1 and 64 + characters long, begin with a letter, + end with a letter or number, and consist + of alpha-numeric ASCII characters or + hyphens. + + This corresponds to the ``metadata_import_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.MetadataImport` + A metastore resource that imports metadata. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metadata_import, metadata_import_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.CreateMetadataImportRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.CreateMetadataImportRequest): + request = metastore.CreateMetadataImportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_import is not None: + request.metadata_import = metadata_import + if metadata_import_id is not None: + request.metadata_import_id = metadata_import_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_metadata_import] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
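+        # from_gapic turns the raw google.longrunning.Operation into an
+        # Operation future whose result() deserializes into a MetadataImport.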
+ response = operation.from_gapic( + response, + self._transport.operations_client, + metastore.MetadataImport, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_metadata_import( + self, + request: metastore.UpdateMetadataImportRequest = None, + *, + metadata_import: metastore.MetadataImport = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a single import. + Only the description field of MetadataImport is + supported to be updated. + + Args: + request (google.cloud.metastore_v1.types.UpdateMetadataImportRequest): + The request object. Request message for + [DataprocMetastore.UpdateMetadataImport][google.cloud.metastore.v1.DataprocMetastore.UpdateMetadataImport]. + metadata_import (google.cloud.metastore_v1.types.MetadataImport): + Required. The metadata import to update. The server only + merges fields in the import if they are specified in + ``update_mask``. + + The metadata import's ``name`` field is used to identify + the metastore import to be updated. + + This corresponds to the ``metadata_import`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A field mask used to specify the fields to be + overwritten in the metadata import resource by the + update. Fields specified in the ``update_mask`` are + relative to the resource (not to the full request). A + field is overwritten if it is in the mask. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.MetadataImport` + A metastore resource that imports metadata. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metadata_import, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metastore.UpdateMetadataImportRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.UpdateMetadataImportRequest): + request = metastore.UpdateMetadataImportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metadata_import is not None: + request.metadata_import = metadata_import + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_metadata_import] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("metadata_import.name", request.metadata_import.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + metastore.MetadataImport, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. + return response + + def export_metadata( + self, + request: metastore.ExportMetadataRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports metadata from a service. + + Args: + request (google.cloud.metastore_v1.types.ExportMetadataRequest): + The request object. Request message for + [DataprocMetastore.ExportMetadata][google.cloud.metastore.v1.DataprocMetastore.ExportMetadata]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.metastore_v1.types.MetadataExport` + The details of a metadata export operation. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a metastore.ExportMetadataRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metastore.ExportMetadataRequest): + request = metastore.ExportMetadataRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_metadata] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("service", request.service),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + metastore.MetadataExport, + metadata_type=metastore.OperationMetadata, + ) + + # Done; return the response. 
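+        # Callers may block until the export completes with response.result(),
+        # which returns a metastore.MetadataExport on success.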
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-metastore",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DataprocMetastoreClient",) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/pagers.py b/google/cloud/metastore_v1/services/dataproc_metastore/pagers.py new file mode 100644 index 0000000..d144ef3 --- /dev/null +++ b/google/cloud/metastore_v1/services/dataproc_metastore/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.metastore_v1.types import metastore + + +class ListServicesPager: + """A pager for iterating through ``list_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.metastore_v1.types.ListServicesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``services`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListServices`` requests and continue to iterate + through the ``services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.metastore_v1.types.ListServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListServicesResponse], + request: metastore.ListServicesRequest, + response: metastore.ListServicesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.metastore_v1.types.ListServicesRequest): + The initial request object. + response (google.cloud.metastore_v1.types.ListServicesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
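+
+        Users normally obtain this pager from ``list_services`` rather than
+        constructing it directly.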
+ """ + self._method = method + self._request = metastore.ListServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metastore.ListServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[metastore.Service]: + for page in self.pages: + yield from page.services + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListServicesAsyncPager: + """A pager for iterating through ``list_services`` requests. + + This class thinly wraps an initial + :class:`google.cloud.metastore_v1.types.ListServicesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``services`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListServices`` requests and continue to iterate + through the ``services`` field on the + corresponding responses. + + All the usual :class:`google.cloud.metastore_v1.types.ListServicesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListServicesResponse]], + request: metastore.ListServicesRequest, + response: metastore.ListServicesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.metastore_v1.types.ListServicesRequest): + The initial request object. + response (google.cloud.metastore_v1.types.ListServicesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListServicesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metastore.ListServicesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[metastore.Service]: + async def async_generator(): + async for page in self.pages: + for response in page.services: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMetadataImportsPager: + """A pager for iterating through ``list_metadata_imports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.metastore_v1.types.ListMetadataImportsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``metadata_imports`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMetadataImports`` requests and continue to iterate + through the ``metadata_imports`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.metastore_v1.types.ListMetadataImportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., metastore.ListMetadataImportsResponse], + request: metastore.ListMetadataImportsRequest, + response: metastore.ListMetadataImportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.metastore_v1.types.ListMetadataImportsRequest): + The initial request object. + response (google.cloud.metastore_v1.types.ListMetadataImportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metastore.ListMetadataImportsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[metastore.ListMetadataImportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[metastore.MetadataImport]: + for page in self.pages: + yield from page.metadata_imports + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMetadataImportsAsyncPager: + """A pager for iterating through ``list_metadata_imports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.metastore_v1.types.ListMetadataImportsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``metadata_imports`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMetadataImports`` requests and continue to iterate + through the ``metadata_imports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.metastore_v1.types.ListMetadataImportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[metastore.ListMetadataImportsResponse]], + request: metastore.ListMetadataImportsRequest, + response: metastore.ListMetadataImportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.metastore_v1.types.ListMetadataImportsRequest): + The initial request object. + response (google.cloud.metastore_v1.types.ListMetadataImportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metastore.ListMetadataImportsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[metastore.ListMetadataImportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[metastore.MetadataImport]: + async def async_generator(): + async for page in self.pages: + for response in page.metadata_imports: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/transports/__init__.py b/google/cloud/metastore_v1/services/dataproc_metastore/transports/__init__.py new file mode 100644 index 0000000..2f902e5 --- /dev/null +++ b/google/cloud/metastore_v1/services/dataproc_metastore/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataprocMetastoreTransport +from .grpc import DataprocMetastoreGrpcTransport +from .grpc_asyncio import DataprocMetastoreGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataprocMetastoreTransport]] +_transport_registry["grpc"] = DataprocMetastoreGrpcTransport +_transport_registry["grpc_asyncio"] = DataprocMetastoreGrpcAsyncIOTransport + +__all__ = ( + "DataprocMetastoreTransport", + "DataprocMetastoreGrpcTransport", + "DataprocMetastoreGrpcAsyncIOTransport", +) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/transports/base.py b/google/cloud/metastore_v1/services/dataproc_metastore/transports/base.py new file mode 100644 index 0000000..e57fd1a --- /dev/null +++ b/google/cloud/metastore_v1/services/dataproc_metastore/transports/base.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore + +from google.cloud.metastore_v1.types import metastore +from google.longrunning import operations_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-metastore",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + + +class DataprocMetastoreTransport(abc.ABC): + """Abstract transport class for DataprocMetastore.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "metastore.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. 
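+        # Resolution order below: an explicit ``credentials`` object wins,
+        # then a ``credentials_file`` path, and finally Application Default
+        # Credentials via google.auth.default().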
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
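+        # Read-only RPCs below are wrapped without a default timeout, while
+        # mutating RPCs (create/update/delete/export) default to 60 seconds.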
+ self._wrapped_methods = { + self.list_services: gapic_v1.method.wrap_method( + self.list_services, default_timeout=None, client_info=client_info, + ), + self.get_service: gapic_v1.method.wrap_method( + self.get_service, default_timeout=None, client_info=client_info, + ), + self.create_service: gapic_v1.method.wrap_method( + self.create_service, default_timeout=60.0, client_info=client_info, + ), + self.update_service: gapic_v1.method.wrap_method( + self.update_service, default_timeout=60.0, client_info=client_info, + ), + self.delete_service: gapic_v1.method.wrap_method( + self.delete_service, default_timeout=60.0, client_info=client_info, + ), + self.list_metadata_imports: gapic_v1.method.wrap_method( + self.list_metadata_imports, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_import: gapic_v1.method.wrap_method( + self.get_metadata_import, default_timeout=None, client_info=client_info, + ), + self.create_metadata_import: gapic_v1.method.wrap_method( + self.create_metadata_import, + default_timeout=60.0, + client_info=client_info, + ), + self.update_metadata_import: gapic_v1.method.wrap_method( + self.update_metadata_import, + default_timeout=60.0, + client_info=client_info, + ), + self.export_metadata: gapic_v1.method.wrap_method( + self.export_metadata, default_timeout=60.0, client_info=client_info, + ), + } + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_services( + self, + ) -> Callable[ + [metastore.ListServicesRequest], + Union[ + metastore.ListServicesResponse, Awaitable[metastore.ListServicesResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_service( + self, + ) -> Callable[ + [metastore.GetServiceRequest], + Union[metastore.Service, Awaitable[metastore.Service]], + ]: + raise NotImplementedError() + + @property + def create_service( + self, + ) -> Callable[ + [metastore.CreateServiceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_service( + self, + ) -> Callable[ + [metastore.UpdateServiceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_service( + self, + ) -> Callable[ + [metastore.DeleteServiceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_metadata_imports( + self, + ) -> Callable[ + [metastore.ListMetadataImportsRequest], + Union[ + metastore.ListMetadataImportsResponse, + Awaitable[metastore.ListMetadataImportsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_metadata_import( + self, + ) -> Callable[ + [metastore.GetMetadataImportRequest], + Union[metastore.MetadataImport, Awaitable[metastore.MetadataImport]], + ]: + raise NotImplementedError() + + @property + def create_metadata_import( + self, + ) -> Callable[ + [metastore.CreateMetadataImportRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_metadata_import( + self, + ) -> Callable[ + [metastore.UpdateMetadataImportRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_metadata( + self, + ) -> Callable[ + 
[metastore.ExportMetadataRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + +__all__ = ("DataprocMetastoreTransport",) diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc.py b/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc.py new file mode 100644 index 0000000..0551adc --- /dev/null +++ b/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc.py @@ -0,0 +1,528 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.metastore_v1.types import metastore +from google.longrunning import operations_pb2 # type: ignore +from .base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO + + +class DataprocMetastoreGrpcTransport(DataprocMetastoreTransport): + """gRPC backend transport for DataprocMetastore. + + Configures and manages metastore services. Metastore services are + fully managed, highly available, auto-scaled, auto-healing, + OSS-native deployments of technical metadata management software. + Each metastore service exposes a network endpoint through which + metadata queries are served. Metadata queries can originate from a + variety of sources, including Apache Hive, Apache Presto, and Apache + Spark. + + The Dataproc Metastore API defines the following resource model: + + - The service works with a collection of Google Cloud projects, + named: ``/projects/*`` + + - Each project has a collection of available locations, named: + ``/locations/*`` (a location must refer to a Google Cloud + ``region``) + + - Each location has a collection of services, named: + ``/services/*`` + + - Dataproc Metastore services are resources with names of the form: + + ``/projects/{project_number}/locations/{location_id}/services/{service_id}``. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
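+
+    A minimal usage sketch (illustrative only: most applications should use
+    ``DataprocMetastoreClient`` rather than a transport directly, and the
+    project and location below are placeholders)::
+
+        from google.cloud.metastore_v1.services.dataproc_metastore.transports import (
+            DataprocMetastoreGrpcTransport,
+        )
+        from google.cloud.metastore_v1.types import metastore
+
+        # Credentials are resolved from the environment when not passed in.
+        transport = DataprocMetastoreGrpcTransport()
+        response = transport.list_services(
+            metastore.ListServicesRequest(
+                parent="projects/my-project/locations/us-central1"
+            )
+        )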
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "metastore.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the gRPC channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials, and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "metastore.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            **self_signed_jwt_kwargs,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
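+
+        A minimal sketch (illustrative only: ``op_name`` is a placeholder
+        operation resource name returned by a long-running call such as
+        ``create_service``)::
+
+            operation = transport.operations_client.get_operation(op_name)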
+ """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_services( + self, + ) -> Callable[[metastore.ListServicesRequest], metastore.ListServicesResponse]: + r"""Return a callable for the list services method over gRPC. + + Lists services in a project and location. + + Returns: + Callable[[~.ListServicesRequest], + ~.ListServicesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_services" not in self._stubs: + self._stubs["list_services"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/ListServices", + request_serializer=metastore.ListServicesRequest.serialize, + response_deserializer=metastore.ListServicesResponse.deserialize, + ) + return self._stubs["list_services"] + + @property + def get_service(self) -> Callable[[metastore.GetServiceRequest], metastore.Service]: + r"""Return a callable for the get service method over gRPC. + + Gets the details of a single service. + + Returns: + Callable[[~.GetServiceRequest], + ~.Service]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_service" not in self._stubs: + self._stubs["get_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/GetService", + request_serializer=metastore.GetServiceRequest.serialize, + response_deserializer=metastore.Service.deserialize, + ) + return self._stubs["get_service"] + + @property + def create_service( + self, + ) -> Callable[[metastore.CreateServiceRequest], operations_pb2.Operation]: + r"""Return a callable for the create service method over gRPC. + + Creates a metastore service in a project and + location. + + Returns: + Callable[[~.CreateServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_service" not in self._stubs: + self._stubs["create_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/CreateService", + request_serializer=metastore.CreateServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_service"] + + @property + def update_service( + self, + ) -> Callable[[metastore.UpdateServiceRequest], operations_pb2.Operation]: + r"""Return a callable for the update service method over gRPC. + + Updates the parameters of a single service. + + Returns: + Callable[[~.UpdateServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
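+        # The multicallable is created once and cached in self._stubs, so
+        # repeated property access reuses the same stub.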
+ if "update_service" not in self._stubs: + self._stubs["update_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/UpdateService", + request_serializer=metastore.UpdateServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_service"] + + @property + def delete_service( + self, + ) -> Callable[[metastore.DeleteServiceRequest], operations_pb2.Operation]: + r"""Return a callable for the delete service method over gRPC. + + Deletes a single service. + + Returns: + Callable[[~.DeleteServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_service" not in self._stubs: + self._stubs["delete_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/DeleteService", + request_serializer=metastore.DeleteServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_service"] + + @property + def list_metadata_imports( + self, + ) -> Callable[ + [metastore.ListMetadataImportsRequest], metastore.ListMetadataImportsResponse + ]: + r"""Return a callable for the list metadata imports method over gRPC. + + Lists imports in a service. + + Returns: + Callable[[~.ListMetadataImportsRequest], + ~.ListMetadataImportsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_metadata_imports" not in self._stubs: + self._stubs["list_metadata_imports"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/ListMetadataImports", + request_serializer=metastore.ListMetadataImportsRequest.serialize, + response_deserializer=metastore.ListMetadataImportsResponse.deserialize, + ) + return self._stubs["list_metadata_imports"] + + @property + def get_metadata_import( + self, + ) -> Callable[[metastore.GetMetadataImportRequest], metastore.MetadataImport]: + r"""Return a callable for the get metadata import method over gRPC. + + Gets details of a single import. + + Returns: + Callable[[~.GetMetadataImportRequest], + ~.MetadataImport]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_metadata_import" not in self._stubs: + self._stubs["get_metadata_import"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/GetMetadataImport", + request_serializer=metastore.GetMetadataImportRequest.serialize, + response_deserializer=metastore.MetadataImport.deserialize, + ) + return self._stubs["get_metadata_import"] + + @property + def create_metadata_import( + self, + ) -> Callable[[metastore.CreateMetadataImportRequest], operations_pb2.Operation]: + r"""Return a callable for the create metadata import method over gRPC. + + Creates a new MetadataImport in a given project and + location. 
+
+        Returns:
+            Callable[[~.CreateMetadataImportRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_metadata_import" not in self._stubs:
+            self._stubs["create_metadata_import"] = self.grpc_channel.unary_unary(
+                "/google.cloud.metastore.v1.DataprocMetastore/CreateMetadataImport",
+                request_serializer=metastore.CreateMetadataImportRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["create_metadata_import"]
+
+    @property
+    def update_metadata_import(
+        self,
+    ) -> Callable[[metastore.UpdateMetadataImportRequest], operations_pb2.Operation]:
+        r"""Return a callable for the update metadata import method over gRPC.
+
+        Updates a single import.
+        Only the description field of MetadataImport can
+        be updated.
+
+        Returns:
+            Callable[[~.UpdateMetadataImportRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_metadata_import" not in self._stubs:
+            self._stubs["update_metadata_import"] = self.grpc_channel.unary_unary(
+                "/google.cloud.metastore.v1.DataprocMetastore/UpdateMetadataImport",
+                request_serializer=metastore.UpdateMetadataImportRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["update_metadata_import"]
+
+    @property
+    def export_metadata(
+        self,
+    ) -> Callable[[metastore.ExportMetadataRequest], operations_pb2.Operation]:
+        r"""Return a callable for the export metadata method over gRPC.
+
+        Exports metadata from a service.
+
+        Returns:
+            Callable[[~.ExportMetadataRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "export_metadata" not in self._stubs:
+            self._stubs["export_metadata"] = self.grpc_channel.unary_unary(
+                "/google.cloud.metastore.v1.DataprocMetastore/ExportMetadata",
+                request_serializer=metastore.ExportMetadataRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["export_metadata"]
+
+
+__all__ = ("DataprocMetastoreGrpcTransport",)
diff --git a/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc_asyncio.py b/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc_asyncio.py
new file mode 100644
index 0000000..57adf57
--- /dev/null
+++ b/google/cloud/metastore_v1/services/dataproc_metastore/transports/grpc_asyncio.py
@@ -0,0 +1,552 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google.api_core import operations_v1  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import packaging.version
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.metastore_v1.types import metastore
+from google.longrunning import operations_pb2  # type: ignore
+from .base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO
+from .grpc import DataprocMetastoreGrpcTransport
+
+
+class DataprocMetastoreGrpcAsyncIOTransport(DataprocMetastoreTransport):
+    """gRPC AsyncIO backend transport for DataprocMetastore.
+
+    Configures and manages metastore services. Metastore services are
+    fully managed, highly available, auto-scaled, auto-healing,
+    OSS-native deployments of technical metadata management software.
+    Each metastore service exposes a network endpoint through which
+    metadata queries are served. Metadata queries can originate from a
+    variety of sources, including Apache Hive, Apache Presto, and Apache
+    Spark.
+
+    The Dataproc Metastore API defines the following resource model:
+
+    -  The service works with a collection of Google Cloud projects,
+       named: ``/projects/*``
+
+    -  Each project has a collection of available locations, named:
+       ``/locations/*`` (a location must refer to a Google Cloud
+       ``region``)
+
+    -  Each location has a collection of services, named:
+       ``/services/*``
+
+    -  Dataproc Metastore services are resources with names of the form:
+
+       ``/projects/{project_number}/locations/{location_id}/services/{service_id}``.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "metastore.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            **self_signed_jwt_kwargs,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "metastore.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the gRPC channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials, and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Return the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Sanity check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsAsyncClient(
+                self.grpc_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def list_services(
+        self,
+    ) -> Callable[
+        [metastore.ListServicesRequest], Awaitable[metastore.ListServicesResponse]
+    ]:
+        r"""Return a callable for the list services method over gRPC.
+
+        Lists services in a project and location.
+
+        Returns:
+            Callable[[~.ListServicesRequest],
+                    Awaitable[~.ListServicesResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_services" not in self._stubs: + self._stubs["list_services"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/ListServices", + request_serializer=metastore.ListServicesRequest.serialize, + response_deserializer=metastore.ListServicesResponse.deserialize, + ) + return self._stubs["list_services"] + + @property + def get_service( + self, + ) -> Callable[[metastore.GetServiceRequest], Awaitable[metastore.Service]]: + r"""Return a callable for the get service method over gRPC. + + Gets the details of a single service. + + Returns: + Callable[[~.GetServiceRequest], + Awaitable[~.Service]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_service" not in self._stubs: + self._stubs["get_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/GetService", + request_serializer=metastore.GetServiceRequest.serialize, + response_deserializer=metastore.Service.deserialize, + ) + return self._stubs["get_service"] + + @property + def create_service( + self, + ) -> Callable[ + [metastore.CreateServiceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create service method over gRPC. + + Creates a metastore service in a project and + location. + + Returns: + Callable[[~.CreateServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_service" not in self._stubs: + self._stubs["create_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/CreateService", + request_serializer=metastore.CreateServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_service"] + + @property + def update_service( + self, + ) -> Callable[ + [metastore.UpdateServiceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update service method over gRPC. + + Updates the parameters of a single service. + + Returns: + Callable[[~.UpdateServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_service" not in self._stubs: + self._stubs["update_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/UpdateService", + request_serializer=metastore.UpdateServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_service"] + + @property + def delete_service( + self, + ) -> Callable[ + [metastore.DeleteServiceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete service method over gRPC. + + Deletes a single service. 
+ + Returns: + Callable[[~.DeleteServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_service" not in self._stubs: + self._stubs["delete_service"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/DeleteService", + request_serializer=metastore.DeleteServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_service"] + + @property + def list_metadata_imports( + self, + ) -> Callable[ + [metastore.ListMetadataImportsRequest], + Awaitable[metastore.ListMetadataImportsResponse], + ]: + r"""Return a callable for the list metadata imports method over gRPC. + + Lists imports in a service. + + Returns: + Callable[[~.ListMetadataImportsRequest], + Awaitable[~.ListMetadataImportsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_metadata_imports" not in self._stubs: + self._stubs["list_metadata_imports"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/ListMetadataImports", + request_serializer=metastore.ListMetadataImportsRequest.serialize, + response_deserializer=metastore.ListMetadataImportsResponse.deserialize, + ) + return self._stubs["list_metadata_imports"] + + @property + def get_metadata_import( + self, + ) -> Callable[ + [metastore.GetMetadataImportRequest], Awaitable[metastore.MetadataImport] + ]: + r"""Return a callable for the get metadata import method over gRPC. + + Gets details of a single import. + + Returns: + Callable[[~.GetMetadataImportRequest], + Awaitable[~.MetadataImport]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_metadata_import" not in self._stubs: + self._stubs["get_metadata_import"] = self.grpc_channel.unary_unary( + "/google.cloud.metastore.v1.DataprocMetastore/GetMetadataImport", + request_serializer=metastore.GetMetadataImportRequest.serialize, + response_deserializer=metastore.MetadataImport.deserialize, + ) + return self._stubs["get_metadata_import"] + + @property + def create_metadata_import( + self, + ) -> Callable[ + [metastore.CreateMetadataImportRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create metadata import method over gRPC. + + Creates a new MetadataImport in a given project and + location. + + Returns: + Callable[[~.CreateMetadataImportRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "create_metadata_import" not in self._stubs:
+            self._stubs["create_metadata_import"] = self.grpc_channel.unary_unary(
+                "/google.cloud.metastore.v1.DataprocMetastore/CreateMetadataImport",
+                request_serializer=metastore.CreateMetadataImportRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["create_metadata_import"]
+
+    @property
+    def update_metadata_import(
+        self,
+    ) -> Callable[
+        [metastore.UpdateMetadataImportRequest], Awaitable[operations_pb2.Operation]
+    ]:
+        r"""Return a callable for the update metadata import method over gRPC.
+
+        Updates a single import.
+        Only the description field of MetadataImport can
+        be updated.
+
+        Returns:
+            Callable[[~.UpdateMetadataImportRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_metadata_import" not in self._stubs:
+            self._stubs["update_metadata_import"] = self.grpc_channel.unary_unary(
+                "/google.cloud.metastore.v1.DataprocMetastore/UpdateMetadataImport",
+                request_serializer=metastore.UpdateMetadataImportRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["update_metadata_import"]
+
+    @property
+    def export_metadata(
+        self,
+    ) -> Callable[
+        [metastore.ExportMetadataRequest], Awaitable[operations_pb2.Operation]
+    ]:
+        r"""Return a callable for the export metadata method over gRPC.
+
+        Exports metadata from a service.
+
+        Returns:
+            Callable[[~.ExportMetadataRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "export_metadata" not in self._stubs:
+            self._stubs["export_metadata"] = self.grpc_channel.unary_unary(
+                "/google.cloud.metastore.v1.DataprocMetastore/ExportMetadata",
+                request_serializer=metastore.ExportMetadataRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["export_metadata"]
+
+
+__all__ = ("DataprocMetastoreGrpcAsyncIOTransport",)
diff --git a/google/cloud/metastore_v1/types/__init__.py b/google/cloud/metastore_v1/types/__init__.py
new file mode 100644
index 0000000..7ce234e
--- /dev/null
+++ b/google/cloud/metastore_v1/types/__init__.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from .metastore import ( + CreateMetadataImportRequest, + CreateServiceRequest, + DatabaseDumpSpec, + DeleteServiceRequest, + ExportMetadataRequest, + GetMetadataImportRequest, + GetServiceRequest, + HiveMetastoreConfig, + KerberosConfig, + ListMetadataImportsRequest, + ListMetadataImportsResponse, + ListServicesRequest, + ListServicesResponse, + LocationMetadata, + MaintenanceWindow, + MetadataExport, + MetadataImport, + MetadataManagementActivity, + OperationMetadata, + Secret, + Service, + UpdateMetadataImportRequest, + UpdateServiceRequest, +) + +__all__ = ( + "CreateMetadataImportRequest", + "CreateServiceRequest", + "DatabaseDumpSpec", + "DeleteServiceRequest", + "ExportMetadataRequest", + "GetMetadataImportRequest", + "GetServiceRequest", + "HiveMetastoreConfig", + "KerberosConfig", + "ListMetadataImportsRequest", + "ListMetadataImportsResponse", + "ListServicesRequest", + "ListServicesResponse", + "LocationMetadata", + "MaintenanceWindow", + "MetadataExport", + "MetadataImport", + "MetadataManagementActivity", + "OperationMetadata", + "Secret", + "Service", + "UpdateMetadataImportRequest", + "UpdateServiceRequest", +) diff --git a/google/cloud/metastore_v1/types/metastore.py b/google/cloud/metastore_v1/types/metastore.py new file mode 100644 index 0000000..fdc6a16 --- /dev/null +++ b/google/cloud/metastore_v1/types/metastore.py @@ -0,0 +1,869 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.metastore.v1", + manifest={ + "Service", + "MaintenanceWindow", + "HiveMetastoreConfig", + "KerberosConfig", + "Secret", + "MetadataManagementActivity", + "MetadataImport", + "MetadataExport", + "ListServicesRequest", + "ListServicesResponse", + "GetServiceRequest", + "CreateServiceRequest", + "UpdateServiceRequest", + "DeleteServiceRequest", + "ListMetadataImportsRequest", + "ListMetadataImportsResponse", + "GetMetadataImportRequest", + "CreateMetadataImportRequest", + "UpdateMetadataImportRequest", + "ExportMetadataRequest", + "OperationMetadata", + "LocationMetadata", + "DatabaseDumpSpec", + }, +) + + +class Service(proto.Message): + r"""A managed metastore service that serves metadata queries. + Attributes: + hive_metastore_config (google.cloud.metastore_v1.types.HiveMetastoreConfig): + Configuration information specific to running + Hive metastore software as the metastore + service. + name (str): + Immutable. The relative resource name of the metastore + service, of the form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metastore + service was created. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metastore + service was last updated. + labels (Sequence[google.cloud.metastore_v1.types.Service.LabelsEntry]): + User-defined labels for the metastore + service. + network (str): + Immutable. The relative resource name of the VPC network on + which the instance can be accessed. It is specified in the + following form: + + ``projects/{project_number}/global/networks/{network_id}``. + endpoint_uri (str): + Output only. The URI of the endpoint used to + access the metastore service. + port (int): + The TCP port at which the metastore service + is reached. Default: 9083. + state (google.cloud.metastore_v1.types.Service.State): + Output only. The current state of the + metastore service. + state_message (str): + Output only. Additional information about the + current state of the metastore service, if + available. + artifact_gcs_uri (str): + Output only. A Cloud Storage URI (starting with ``gs://``) + that specifies where artifacts related to the metastore + service are stored. + tier (google.cloud.metastore_v1.types.Service.Tier): + The tier of the service. + maintenance_window (google.cloud.metastore_v1.types.MaintenanceWindow): + The one hour maintenance window of the + metastore service. This specifies when the + service can be restarted for maintenance + purposes in UTC time. + uid (str): + Output only. The globally unique resource + identifier of the metastore service. + metadata_management_activity (google.cloud.metastore_v1.types.MetadataManagementActivity): + Output only. The metadata management + activities of the metastore service. + release_channel (google.cloud.metastore_v1.types.Service.ReleaseChannel): + Immutable. The release channel of the service. If + unspecified, defaults to ``STABLE``. + """ + + class State(proto.Enum): + r"""The current state of the metastore service.""" + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + SUSPENDING = 3 + SUSPENDED = 4 + UPDATING = 5 + DELETING = 6 + ERROR = 7 + + class Tier(proto.Enum): + r"""Available service tiers.""" + TIER_UNSPECIFIED = 0 + DEVELOPER = 1 + ENTERPRISE = 3 + + class ReleaseChannel(proto.Enum): + r"""Release channels bundle features of varying levels of + stability. Newer features may be introduced initially into less + stable release channels and can be automatically promoted into + more stable release channels. 
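+        For example, a new feature may be introduced in the ``CANARY``
+        channel first and promoted to ``STABLE`` once it stabilizes.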
+ """ + RELEASE_CHANNEL_UNSPECIFIED = 0 + CANARY = 1 + STABLE = 2 + + hive_metastore_config = proto.Field( + proto.MESSAGE, + number=5, + oneof="metastore_config", + message="HiveMetastoreConfig", + ) + name = proto.Field(proto.STRING, number=1,) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) + network = proto.Field(proto.STRING, number=7,) + endpoint_uri = proto.Field(proto.STRING, number=8,) + port = proto.Field(proto.INT32, number=9,) + state = proto.Field(proto.ENUM, number=10, enum=State,) + state_message = proto.Field(proto.STRING, number=11,) + artifact_gcs_uri = proto.Field(proto.STRING, number=12,) + tier = proto.Field(proto.ENUM, number=13, enum=Tier,) + maintenance_window = proto.Field( + proto.MESSAGE, number=15, message="MaintenanceWindow", + ) + uid = proto.Field(proto.STRING, number=16,) + metadata_management_activity = proto.Field( + proto.MESSAGE, number=17, message="MetadataManagementActivity", + ) + release_channel = proto.Field(proto.ENUM, number=19, enum=ReleaseChannel,) + + +class MaintenanceWindow(proto.Message): + r"""Maintenance window. This specifies when Dataproc Metastore + may perform system maintenance operation to the service. + + Attributes: + hour_of_day (google.protobuf.wrappers_pb2.Int32Value): + The hour of day (0-23) when the window + starts. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + The day of week, when the window starts. + """ + + hour_of_day = proto.Field(proto.MESSAGE, number=1, message=wrappers_pb2.Int32Value,) + day_of_week = proto.Field(proto.ENUM, number=2, enum=dayofweek_pb2.DayOfWeek,) + + +class HiveMetastoreConfig(proto.Message): + r"""Specifies configuration information specific to running Hive + metastore software as the metastore service. + + Attributes: + version (str): + Immutable. The Hive metastore schema version. + config_overrides (Sequence[google.cloud.metastore_v1.types.HiveMetastoreConfig.ConfigOverridesEntry]): + A mapping of Hive metastore configuration key-value pairs to + apply to the Hive metastore (configured in + ``hive-site.xml``). The mappings override system defaults + (some keys cannot be overridden). + kerberos_config (google.cloud.metastore_v1.types.KerberosConfig): + Information used to configure the Hive metastore service as + a service principal in a Kerberos realm. To disable + Kerberos, use the ``UpdateService`` method and specify this + field's path (``hive_metastore_config.kerberos_config``) in + the request's ``update_mask`` while omitting this field from + the request's ``service``. + """ + + version = proto.Field(proto.STRING, number=1,) + config_overrides = proto.MapField(proto.STRING, proto.STRING, number=2,) + kerberos_config = proto.Field(proto.MESSAGE, number=3, message="KerberosConfig",) + + +class KerberosConfig(proto.Message): + r"""Configuration information for a Kerberos principal. + Attributes: + keytab (google.cloud.metastore_v1.types.Secret): + A Kerberos keytab file that can be used to + authenticate a service principal with a Kerberos + Key Distribution Center (KDC). + principal (str): + A Kerberos principal that exists in the both the keytab the + KDC to authenticate as. A typical principal is of the form + ``primary/instance@REALM``, but there is no exact format. + krb5_config_gcs_uri (str): + A Cloud Storage URI that specifies the path to a krb5.conf + file. 
It is of the form + ``gs://{bucket_name}/path/to/krb5.conf``, although the file + does not need to be named krb5.conf explicitly. + """ + + keytab = proto.Field(proto.MESSAGE, number=1, message="Secret",) + principal = proto.Field(proto.STRING, number=2,) + krb5_config_gcs_uri = proto.Field(proto.STRING, number=3,) + + +class Secret(proto.Message): + r"""A securely stored value. + Attributes: + cloud_secret (str): + The relative resource name of a Secret Manager secret + version, in the following form: + + ``projects/{project_number}/secrets/{secret_id}/versions/{version_id}``. + """ + + cloud_secret = proto.Field(proto.STRING, number=2, oneof="value",) + + +class MetadataManagementActivity(proto.Message): + r"""The metadata management activities of the metastore service. + Attributes: + metadata_exports (Sequence[google.cloud.metastore_v1.types.MetadataExport]): + Output only. The latest metadata exports of + the metastore service. + """ + + metadata_exports = proto.RepeatedField( + proto.MESSAGE, number=1, message="MetadataExport", + ) + + +class MetadataImport(proto.Message): + r"""A metastore resource that imports metadata. + Attributes: + database_dump (google.cloud.metastore_v1.types.MetadataImport.DatabaseDump): + Immutable. A database dump from a pre-existing metastore's database. + name (str): + Immutable. The relative resource name of the metadata + import, of the form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports/{metadata_import_id}``. + description (str): + The description of the metadata import. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata + import was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata + import was last updated. + state (google.cloud.metastore_v1.types.MetadataImport.State): + Output only. The current state of the + metadata import. + """ + + class State(proto.Enum): + r"""The current state of the metadata import.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + UPDATING = 3 + FAILED = 4 + + class DatabaseDump(proto.Message): + r"""A specification of the location of and metadata about a + database dump from a relational database management system. + + Attributes: + database_type (google.cloud.metastore_v1.types.MetadataImport.DatabaseDump.DatabaseType): + The type of the database. + gcs_uri (str): + A Cloud Storage object or folder URI that specifies the + source from which to import metadata. It must begin with + ``gs://``. + type_ (google.cloud.metastore_v1.types.DatabaseDumpSpec.Type): + Optional. The type of the database dump. If unspecified, + defaults to ``MYSQL``.
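+
+        For example, an illustrative sketch of populating the
+        ``metadata`` oneof with a dump (same re-export assumption as
+        above; the bucket path is a placeholder)::
+
+            from google.cloud import metastore_v1
+
+            metadata_import = metastore_v1.MetadataImport(
+                description="Initial import",
+                database_dump=metastore_v1.MetadataImport.DatabaseDump(
+                    gcs_uri="gs://example-bucket/hive-dump",
+                    type_=metastore_v1.DatabaseDumpSpec.Type.MYSQL,
+                ),
+            )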
+ """ + + class DatabaseType(proto.Enum): + r"""The type of the database.""" + DATABASE_TYPE_UNSPECIFIED = 0 + MYSQL = 1 + + database_type = proto.Field( + proto.ENUM, number=1, enum="MetadataImport.DatabaseDump.DatabaseType", + ) + gcs_uri = proto.Field(proto.STRING, number=2,) + type_ = proto.Field(proto.ENUM, number=4, enum="DatabaseDumpSpec.Type",) + + database_dump = proto.Field( + proto.MESSAGE, number=6, oneof="metadata", message=DatabaseDump, + ) + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=5, enum=State,) + + +class MetadataExport(proto.Message): + r"""The details of a metadata export operation. + Attributes: + destination_gcs_uri (str): + Output only. A Cloud Storage URI of a folder that metadata + are exported to, in the form of + ``gs:////``, + where ```` is automatically generated. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the export + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the export ended. + state (google.cloud.metastore_v1.types.MetadataExport.State): + Output only. The current state of the export. + database_dump_type (google.cloud.metastore_v1.types.DatabaseDumpSpec.Type): + Output only. The type of the database dump. + """ + + class State(proto.Enum): + r"""The current state of the metadata export.""" + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLED = 4 + + destination_gcs_uri = proto.Field(proto.STRING, number=4, oneof="destination",) + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=3, enum=State,) + database_dump_type = proto.Field( + proto.ENUM, number=5, enum="DatabaseDumpSpec.Type", + ) + + +class ListServicesRequest(proto.Message): + r"""Request message for + [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices]. + + Attributes: + parent (str): + Required. The relative resource name of the location of + metastore services to list, in the following form: + + ``projects/{project_number}/locations/{location_id}``. + page_size (int): + Optional. The maximum number of services to + return. The response may contain less than the + maximum number. If unspecified, no more than 500 + services are returned. The maximum value is + 1000; values above 1000 are changed to 1000. + page_token (str): + Optional. A page token, received from a previous + [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices] + call. Provide this token to retrieve the subsequent page. + + To retrieve the first page, supply an empty page token. + + When paginating, other parameters provided to + [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices] + must match the call that provided the page token. + filter (str): + Optional. The filter to apply to list + results. + order_by (str): + Optional. Specify the ordering of results as described in + `Sorting + Order `__. + If not specified, the results will be sorted in the default + order. 
+ """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) + + +class ListServicesResponse(proto.Message): + r"""Response message for + [DataprocMetastore.ListServices][google.cloud.metastore.v1.DataprocMetastore.ListServices]. + + Attributes: + services (Sequence[google.cloud.metastore_v1.types.Service]): + The services in the specified location. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (Sequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + services = proto.RepeatedField(proto.MESSAGE, number=1, message="Service",) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) + + +class GetServiceRequest(proto.Message): + r"""Request message for + [DataprocMetastore.GetService][google.cloud.metastore.v1.DataprocMetastore.GetService]. + + Attributes: + name (str): + Required. The relative resource name of the metastore + service to retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreateServiceRequest(proto.Message): + r"""Request message for + [DataprocMetastore.CreateService][google.cloud.metastore.v1.DataprocMetastore.CreateService]. + + Attributes: + parent (str): + Required. The relative resource name of the location in + which to create a metastore service, in the following form: + + ``projects/{project_number}/locations/{location_id}``. + service_id (str): + Required. The ID of the metastore service, + which is used as the final component of the + metastore service's name. + This value must be between 2 and 63 characters + long inclusive, begin with a letter, end with a + letter or number, and consist of alpha-numeric + ASCII characters or hyphens. + service (google.cloud.metastore_v1.types.Service): + Required. The Metastore service to create. The ``name`` + field is ignored. The ID of the created metastore service + must be provided in the request's ``service_id`` field. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments. + + The request ID must be a valid + `UUID `__ + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + """ + + parent = proto.Field(proto.STRING, number=1,) + service_id = proto.Field(proto.STRING, number=2,) + service = proto.Field(proto.MESSAGE, number=3, message="Service",) + request_id = proto.Field(proto.STRING, number=4,) + + +class UpdateServiceRequest(proto.Message): + r"""Request message for + [DataprocMetastore.UpdateService][google.cloud.metastore.v1.DataprocMetastore.UpdateService]. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. 
A field mask used to specify the fields to be + overwritten in the metastore service resource by the update. + Fields specified in the ``update_mask`` are relative to the + resource (not to the full request). A field is overwritten + if it is in the mask. + service (google.cloud.metastore_v1.types.Service): + Required. The metastore service to update. The server only + merges fields in the service if they are specified in + ``update_mask``. + + The metastore service's ``name`` field is used to identify + the metastore service to be updated. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments. + + The request ID must be a valid + `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier#Format>`__. + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + """ + + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) + service = proto.Field(proto.MESSAGE, number=2, message="Service",) + request_id = proto.Field(proto.STRING, number=3,) + + +class DeleteServiceRequest(proto.Message): + r"""Request message for + [DataprocMetastore.DeleteService][google.cloud.metastore.v1.DataprocMetastore.DeleteService]. + + Attributes: + name (str): + Required. The relative resource name of the metastore + service to delete, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments. + + The request ID must be a valid + `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier#Format>`__. + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + """ + + name = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=2,) + + +class ListMetadataImportsRequest(proto.Message): + r"""Request message for + [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1.DataprocMetastore.ListMetadataImports]. + + Attributes: + parent (str): + Required. The relative resource name of the service whose + metadata imports to list, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports``. + page_size (int): + Optional. The maximum number of imports to + return. The response may contain fewer than the + maximum number. If unspecified, no more than 500 + imports are returned. The maximum value is 1000; + values above 1000 are changed to 1000. + page_token (str): + Optional. A page token, received from a previous + [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1.DataprocMetastore.ListMetadataImports] + call. Provide this token to retrieve the subsequent page. + + To retrieve the first page, supply an empty page token.
+ + When paginating, other parameters provided to + [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1.DataprocMetastore.ListMetadataImports] + must match the call that provided the page token. + filter (str): + Optional. The filter to apply to list + results. + order_by (str): + Optional. Specify the ordering of results as described in + `Sorting + Order <https://cloud.google.com/apis/design/design_patterns#sorting_order>`__. + If not specified, the results will be sorted in the default + order. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) + + +class ListMetadataImportsResponse(proto.Message): + r"""Response message for + [DataprocMetastore.ListMetadataImports][google.cloud.metastore.v1.DataprocMetastore.ListMetadataImports]. + + Attributes: + metadata_imports (Sequence[google.cloud.metastore_v1.types.MetadataImport]): + The imports in the specified service. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (Sequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + metadata_imports = proto.RepeatedField( + proto.MESSAGE, number=1, message="MetadataImport", + ) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) + + +class GetMetadataImportRequest(proto.Message): + r"""Request message for + [DataprocMetastore.GetMetadataImport][google.cloud.metastore.v1.DataprocMetastore.GetMetadataImport]. + + Attributes: + name (str): + Required. The relative resource name of the metadata import + to retrieve, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports/{import_id}``. + """ + + name = proto.Field(proto.STRING, number=1,) + + +class CreateMetadataImportRequest(proto.Message): + r"""Request message for + [DataprocMetastore.CreateMetadataImport][google.cloud.metastore.v1.DataprocMetastore.CreateMetadataImport]. + + Attributes: + parent (str): + Required. The relative resource name of the service in which + to create a metadata import, in the following form: + + ``projects/{project_number}/locations/{location_id}/services/{service_id}``. + metadata_import_id (str): + Required. The ID of the metadata import, + which is used as the final component of the + metadata import's name. + This value must be between 1 and 64 characters + long, begin with a letter, end with a letter or + number, and consist of alpha-numeric ASCII + characters or hyphens. + metadata_import (google.cloud.metastore_v1.types.MetadataImport): + Required. The metadata import to create. The ``name`` field + is ignored. The ID of the created metadata import must be + provided in the request's ``metadata_import_id`` field. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments.
+ + The request ID must be a valid + `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier#Format>`__. + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + """ + + parent = proto.Field(proto.STRING, number=1,) + metadata_import_id = proto.Field(proto.STRING, number=2,) + metadata_import = proto.Field(proto.MESSAGE, number=3, message="MetadataImport",) + request_id = proto.Field(proto.STRING, number=4,) + + +class UpdateMetadataImportRequest(proto.Message): + r"""Request message for + [DataprocMetastore.UpdateMetadataImport][google.cloud.metastore.v1.DataprocMetastore.UpdateMetadataImport]. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A field mask used to specify the fields to be + overwritten in the metadata import resource by the update. + Fields specified in the ``update_mask`` are relative to the + resource (not to the full request). A field is overwritten + if it is in the mask. + metadata_import (google.cloud.metastore_v1.types.MetadataImport): + Required. The metadata import to update. The server only + merges fields in the import if they are specified in + ``update_mask``. + + The metadata import's ``name`` field is used to identify the + metadata import to be updated. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments. + + The request ID must be a valid + `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier#Format>`__. + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + """ + + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) + metadata_import = proto.Field(proto.MESSAGE, number=2, message="MetadataImport",) + request_id = proto.Field(proto.STRING, number=3,) + + +class ExportMetadataRequest(proto.Message): + r"""Request message for + [DataprocMetastore.ExportMetadata][google.cloud.metastore.v1.DataprocMetastore.ExportMetadata]. + + Attributes: + destination_gcs_folder (str): + A Cloud Storage URI of a folder, in the format + ``gs://<bucket_name>/<path_inside_bucket>``. A sub-folder + ``<export_folder>`` containing exported files will be + created below it. + service (str): + Required. The relative resource name of the metastore + service to run export, in the following form: + + ``projects/{project_id}/locations/{location_id}/services/{service_id}``. + request_id (str): + Optional. A request ID. Specify a unique request ID to allow + the server to ignore the request if it has completed. The + server will ignore subsequent requests that provide a + duplicate request ID for at least 60 minutes after the first + request. + + For example, if an initial request times out, followed by + another request with the same request ID, the server ignores + the second request to prevent the creation of duplicate + commitments. + + The request ID must be a valid + `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier#Format>`__. + A zero UUID (00000000-0000-0000-0000-000000000000) is not + supported. + database_dump_type (google.cloud.metastore_v1.types.DatabaseDumpSpec.Type): + Optional. The type of the database dump. If unspecified, + defaults to ``MYSQL``.
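+
+    Example: an illustrative sketch with placeholder resource names; the
+    call returns a long-running operation whose result resolves to a
+    ``MetadataExport``, and ``operation.metadata`` carries the
+    ``OperationMetadata`` message defined below::
+
+        from google.cloud import metastore_v1
+
+        client = metastore_v1.DataprocMetastoreClient()
+        operation = client.export_metadata(
+            request=metastore_v1.ExportMetadataRequest(
+                service="projects/example-project/locations/us-central1/services/example",
+                destination_gcs_folder="gs://example-bucket/exports",
+            )
+        )
+        export = operation.result()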
+ """ + + destination_gcs_folder = proto.Field(proto.STRING, number=2, oneof="destination",) + service = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=3,) + database_dump_type = proto.Field( + proto.ENUM, number=4, enum="DatabaseDumpSpec.Type", + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the caller has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_message = proto.Field(proto.STRING, number=5,) + requested_cancellation = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) + + +class LocationMetadata(proto.Message): + r"""Metadata about the service in a location. + Attributes: + supported_hive_metastore_versions (Sequence[google.cloud.metastore_v1.types.LocationMetadata.HiveMetastoreVersion]): + The versions of Hive Metastore that can be used when + creating a new metastore service in this location. The + server guarantees that exactly one ``HiveMetastoreVersion`` + in the list will set ``is_default``. + """ + + class HiveMetastoreVersion(proto.Message): + r"""A specification of a supported version of the Hive Metastore + software. + + Attributes: + version (str): + The semantic version of the Hive Metastore + software. + is_default (bool): + Whether ``version`` will be chosen by the server if a + metastore service is created with a ``HiveMetastoreConfig`` + that omits the ``version``. + """ + + version = proto.Field(proto.STRING, number=1,) + is_default = proto.Field(proto.BOOL, number=2,) + + supported_hive_metastore_versions = proto.RepeatedField( + proto.MESSAGE, number=1, message=HiveMetastoreVersion, + ) + + +class DatabaseDumpSpec(proto.Message): + r"""The specification of database dump to import from or export + to. 
+ """ + + class Type(proto.Enum): + r"""The type of the database dump.""" + TYPE_UNSPECIFIED = 0 + MYSQL = 1 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/metastore_v1alpha/__init__.py b/google/cloud/metastore_v1alpha/__init__.py index a175a8e..ba27b5e 100644 --- a/google/cloud/metastore_v1alpha/__init__.py +++ b/google/cloud/metastore_v1alpha/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,12 +15,14 @@ # from .services.dataproc_metastore import DataprocMetastoreClient +from .services.dataproc_metastore import DataprocMetastoreAsyncClient + from .types.metastore import Backup from .types.metastore import CreateBackupRequest from .types.metastore import CreateMetadataImportRequest from .types.metastore import CreateServiceRequest -from .types.metastore import DataCatalogConfig from .types.metastore import DatabaseDumpSpec +from .types.metastore import DataCatalogConfig from .types.metastore import DeleteBackupRequest from .types.metastore import DeleteServiceRequest from .types.metastore import ExportMetadataRequest @@ -50,14 +51,15 @@ from .types.metastore import UpdateMetadataImportRequest from .types.metastore import UpdateServiceRequest - __all__ = ( + "DataprocMetastoreAsyncClient", "Backup", "CreateBackupRequest", "CreateMetadataImportRequest", "CreateServiceRequest", "DataCatalogConfig", "DatabaseDumpSpec", + "DataprocMetastoreClient", "DeleteBackupRequest", "DeleteServiceRequest", "ExportMetadataRequest", @@ -85,5 +87,4 @@ "Service", "UpdateMetadataImportRequest", "UpdateServiceRequest", - "DataprocMetastoreClient", ) diff --git a/google/cloud/metastore_v1alpha/gapic_metadata.json b/google/cloud/metastore_v1alpha/gapic_metadata.json new file mode 100644 index 0000000..8aa71ab --- /dev/null +++ b/google/cloud/metastore_v1alpha/gapic_metadata.json @@ -0,0 +1,173 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.metastore_v1alpha", + "protoPackage": "google.cloud.metastore.v1alpha", + "schema": "1.0", + "services": { + "DataprocMetastore": { + "clients": { + "grpc": { + "libraryClient": "DataprocMetastoreClient", + "rpcs": { + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateMetadataImport": { + "methods": [ + "create_metadata_import" + ] + }, + "CreateService": { + "methods": [ + "create_service" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "ExportMetadata": { + "methods": [ + "export_metadata" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetMetadataImport": { + "methods": [ + "get_metadata_import" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListMetadataImports": { + "methods": [ + "list_metadata_imports" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "RestoreService": { + "methods": [ + "restore_service" + ] + }, + "UpdateMetadataImport": { + "methods": [ + "update_metadata_import" + ] + }, + "UpdateService": { + "methods": [ + "update_service" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataprocMetastoreAsyncClient", + "rpcs": { + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateMetadataImport": { + "methods": [ + "create_metadata_import" + ] + }, + 
"CreateService": { + "methods": [ + "create_service" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "ExportMetadata": { + "methods": [ + "export_metadata" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetMetadataImport": { + "methods": [ + "get_metadata_import" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListMetadataImports": { + "methods": [ + "list_metadata_imports" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "RestoreService": { + "methods": [ + "restore_service" + ] + }, + "UpdateMetadataImport": { + "methods": [ + "update_metadata_import" + ] + }, + "UpdateService": { + "methods": [ + "update_service" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/metastore_v1alpha/services/__init__.py b/google/cloud/metastore_v1alpha/services/__init__.py index 42ffdf2..4de6597 100644 --- a/google/cloud/metastore_v1alpha/services/__init__.py +++ b/google/cloud/metastore_v1alpha/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/metastore_v1alpha/services/dataproc_metastore/__init__.py b/google/cloud/metastore_v1alpha/services/dataproc_metastore/__init__.py index 6f26cf1..194ae14 100644 --- a/google/cloud/metastore_v1alpha/services/dataproc_metastore/__init__.py +++ b/google/cloud/metastore_v1alpha/services/dataproc_metastore/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import DataprocMetastoreClient from .async_client import DataprocMetastoreAsyncClient diff --git a/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py b/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py index 629a4e6..7fda261 100644 --- a/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py +++ b/google/cloud/metastore_v1alpha/services/dataproc_metastore/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,20 +20,19 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.metastore_v1alpha.services.dataproc_metastore import pagers from google.cloud.metastore_v1alpha.types import metastore -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DataprocMetastoreGrpcAsyncIOTransport from .client import DataprocMetastoreClient @@ -82,31 +79,26 @@ class DataprocMetastoreAsyncClient: parse_network_path = staticmethod(DataprocMetastoreClient.parse_network_path) service_path = staticmethod(DataprocMetastoreClient.service_path) parse_service_path = staticmethod(DataprocMetastoreClient.parse_service_path) - common_billing_account_path = staticmethod( DataprocMetastoreClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( DataprocMetastoreClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(DataprocMetastoreClient.common_folder_path) parse_common_folder_path = staticmethod( DataprocMetastoreClient.parse_common_folder_path ) - common_organization_path = staticmethod( DataprocMetastoreClient.common_organization_path ) parse_common_organization_path = staticmethod( DataprocMetastoreClient.parse_common_organization_path ) - common_project_path = staticmethod(DataprocMetastoreClient.common_project_path) parse_common_project_path = staticmethod( DataprocMetastoreClient.parse_common_project_path ) - common_location_path = staticmethod(DataprocMetastoreClient.common_location_path) parse_common_location_path = staticmethod( DataprocMetastoreClient.parse_common_location_path @@ -114,7 +106,8 @@ class DataprocMetastoreAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -129,7 +122,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -146,7 +139,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataprocMetastoreTransport: - """Return the transport used by the client instance. 
+ """Returns the transport used by the client instance. Returns: DataprocMetastoreTransport: The transport used by the client instance. @@ -160,12 +153,12 @@ def transport(self) -> DataprocMetastoreTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, DataprocMetastoreTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the dataproc metastore client. + """Instantiates the dataproc metastore client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -197,7 +190,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = DataprocMetastoreClient( credentials=credentials, transport=transport, @@ -229,7 +221,6 @@ async def list_services( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -259,7 +250,6 @@ async def list_services( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -313,7 +303,6 @@ async def get_service( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -340,7 +329,6 @@ async def get_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -415,7 +403,6 @@ async def create_service( This corresponds to the ``service_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -445,7 +432,6 @@ async def create_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if service is not None: @@ -486,7 +472,7 @@ async def update_service( request: metastore.UpdateServiceRequest = None, *, service: metastore.Service = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -518,7 +504,6 @@ async def update_service( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -548,7 +533,6 @@ async def update_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if service is not None: request.service = service if update_mask is not None: @@ -608,7 +592,6 @@ async def delete_service( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -648,7 +631,6 @@ async def delete_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -673,7 +655,7 @@ async def delete_service( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=metastore.OperationMetadata, ) @@ -704,7 +686,6 @@ async def list_metadata_imports( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -734,7 +715,6 @@ async def list_metadata_imports( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -788,7 +768,6 @@ async def get_metadata_import( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -815,7 +794,6 @@ async def get_metadata_import( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -889,7 +867,6 @@ async def create_metadata_import( This corresponds to the ``metadata_import_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -919,7 +896,6 @@ async def create_metadata_import( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metadata_import is not None: @@ -960,7 +936,7 @@ async def update_metadata_import( request: metastore.UpdateMetadataImportRequest = None, *, metadata_import: metastore.MetadataImport = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -994,7 +970,6 @@ async def update_metadata_import( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1024,7 +999,6 @@ async def update_metadata_import( # If we have keyword arguments corresponding to fields on the # request, apply these. - if metadata_import is not None: request.metadata_import = metadata_import if update_mask is not None: @@ -1074,7 +1048,6 @@ async def export_metadata( request (:class:`google.cloud.metastore_v1alpha.types.ExportMetadataRequest`): The request object. Request message for [DataprocMetastore.ExportMetadata][google.cloud.metastore.v1alpha.DataprocMetastore.ExportMetadata]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1091,7 +1064,6 @@ async def export_metadata( """ # Create or coerce a protobuf request object. 
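+        # ExportMetadata exposes no flattened field arguments, so the
+        # request is only coerced to the proto type here.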
- request = metastore.ExportMetadataRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1156,7 +1128,6 @@ async def restore_service( This corresponds to the ``backup`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1186,7 +1157,6 @@ async def restore_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if service is not None: request.service = service if backup is not None: @@ -1244,7 +1214,6 @@ async def list_backups( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1274,7 +1243,6 @@ async def list_backups( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1328,7 +1296,6 @@ async def get_backup( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1353,7 +1320,6 @@ async def get_backup( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1424,7 +1390,6 @@ async def create_backup( This corresponds to the ``backup_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1454,7 +1419,6 @@ async def create_backup( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if backup is not None: @@ -1514,7 +1478,6 @@ async def delete_backup( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1554,7 +1517,6 @@ async def delete_backup( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1579,7 +1541,7 @@ async def delete_backup( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=metastore.OperationMetadata, ) diff --git a/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py b/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py index e52010b..8b0f125 100644 --- a/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py +++ b/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
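A minimal sketch of driving the async surface whose diff ends above; the v1alpha package root now re-exports DataprocMetastoreAsyncClient, and the project and location below are placeholders:

    import asyncio

    from google.cloud import metastore_v1alpha

    async def main():
        client = metastore_v1alpha.DataprocMetastoreAsyncClient()
        # Awaiting the call yields an async pager over Service messages.
        pager = await client.list_services(
            parent="projects/example-project/locations/us-central1"
        )
        async for service in pager:
            print(service.name)

    asyncio.run(main())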
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,10 +34,9 @@ from google.api_core import operation_async # type: ignore from google.cloud.metastore_v1alpha.services.dataproc_metastore import pagers from google.cloud.metastore_v1alpha.types import metastore -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DataprocMetastoreGrpcTransport from .transports.grpc_asyncio import DataprocMetastoreGrpcAsyncIOTransport @@ -62,7 +59,7 @@ class DataprocMetastoreClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[DataprocMetastoreTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -108,7 +105,8 @@ class DataprocMetastoreClient(metaclass=DataprocMetastoreClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -142,7 +140,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -159,7 +158,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -178,23 +177,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataprocMetastoreTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - DataprocMetastoreTransport: The transport used by the client instance. + DataprocMetastoreTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def backup_path(project: str, location: str, service: str, backup: str,) -> str: - """Return a fully-qualified backup string.""" + """Returns a fully-qualified backup string.""" return "projects/{project}/locations/{location}/services/{service}/backups/{backup}".format( project=project, location=location, service=service, backup=backup, ) @staticmethod def parse_backup_path(path: str) -> Dict[str, str]: - """Parse a backup path into its component segments.""" + """Parses a backup path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)/backups/(?P.+?)$", path, @@ -205,7 +205,7 @@ def parse_backup_path(path: str) -> Dict[str, str]: def metadata_import_path( project: str, location: str, service: str, metadata_import: str, ) -> str: - """Return a fully-qualified metadata_import string.""" + """Returns a fully-qualified metadata_import string.""" return "projects/{project}/locations/{location}/services/{service}/metadataImports/{metadata_import}".format( project=project, location=location, @@ -215,7 +215,7 @@ def metadata_import_path( @staticmethod def parse_metadata_import_path(path: str) -> Dict[str, str]: - """Parse a metadata_import path into its component segments.""" + """Parses a metadata_import path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)/metadataImports/(?P.+?)$", path, @@ -224,14 +224,14 @@ def parse_metadata_import_path(path: str) -> Dict[str, str]: @staticmethod def network_path(project: str, network: str,) -> str: - """Return a fully-qualified network string.""" + """Returns a fully-qualified network string.""" return "projects/{project}/global/networks/{network}".format( project=project, network=network, ) @staticmethod def parse_network_path(path: str) -> Dict[str, str]: - """Parse a network path into its component segments.""" + """Parses a network path into its component segments.""" m = re.match( r"^projects/(?P.+?)/global/networks/(?P.+?)$", path ) @@ -239,14 +239,14 @@ def parse_network_path(path: str) -> Dict[str, str]: @staticmethod def service_path(project: str, location: str, service: str,) -> str: - """Return a fully-qualified service string.""" + """Returns a fully-qualified service string.""" return "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, service=service, ) @staticmethod def parse_service_path(path: str) -> Dict[str, str]: - """Parse a service path into its component segments.""" + """Parses a service path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", path, @@ -255,7 +255,7 @@ def parse_service_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -268,7 +268,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -279,7 +279,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization 
string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -290,7 +290,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -301,7 +301,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -315,12 +315,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DataprocMetastoreTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the dataproc metastore client. + """Instantiates the dataproc metastore client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -375,9 +375,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -389,12 +390,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -409,8 +412,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -449,7 +452,6 @@ def list_services( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -481,10 +483,8 @@ def list_services( # there are no flattened fields. if not isinstance(request, metastore.ListServicesRequest): request = metastore.ListServicesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -534,7 +534,6 @@ def get_service( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -563,10 +562,8 @@ def get_service( # there are no flattened fields. if not isinstance(request, metastore.GetServiceRequest): request = metastore.GetServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -637,7 +634,6 @@ def create_service( This corresponds to the ``service_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -669,10 +665,8 @@ def create_service( # there are no flattened fields. if not isinstance(request, metastore.CreateServiceRequest): request = metastore.CreateServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if service is not None: @@ -709,7 +703,7 @@ def update_service( request: metastore.UpdateServiceRequest = None, *, service: metastore.Service = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -741,7 +735,6 @@ def update_service( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -773,10 +766,8 @@ def update_service( # there are no flattened fields. if not isinstance(request, metastore.UpdateServiceRequest): request = metastore.UpdateServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if service is not None: request.service = service if update_mask is not None: @@ -832,7 +823,6 @@ def delete_service( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -874,10 +864,8 @@ def delete_service( # there are no flattened fields. if not isinstance(request, metastore.DeleteServiceRequest): request = metastore.DeleteServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -898,7 +886,7 @@ def delete_service( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=metastore.OperationMetadata, ) @@ -929,7 +917,6 @@ def list_metadata_imports( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -961,10 +948,8 @@ def list_metadata_imports( # there are no flattened fields. if not isinstance(request, metastore.ListMetadataImportsRequest): request = metastore.ListMetadataImportsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -1014,7 +999,6 @@ def get_metadata_import( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1043,10 +1027,8 @@ def get_metadata_import( # there are no flattened fields. if not isinstance(request, metastore.GetMetadataImportRequest): request = metastore.GetMetadataImportRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1116,7 +1098,6 @@ def create_metadata_import( This corresponds to the ``metadata_import_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1148,10 +1129,8 @@ def create_metadata_import( # there are no flattened fields. if not isinstance(request, metastore.CreateMetadataImportRequest): request = metastore.CreateMetadataImportRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metadata_import is not None: @@ -1188,7 +1167,7 @@ def update_metadata_import( request: metastore.UpdateMetadataImportRequest = None, *, metadata_import: metastore.MetadataImport = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1222,7 +1201,6 @@ def update_metadata_import( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1254,10 +1232,8 @@ def update_metadata_import( # there are no flattened fields. if not isinstance(request, metastore.UpdateMetadataImportRequest): request = metastore.UpdateMetadataImportRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if metadata_import is not None: request.metadata_import = metadata_import if update_mask is not None: @@ -1303,7 +1279,6 @@ def export_metadata( request (google.cloud.metastore_v1alpha.types.ExportMetadataRequest): The request object. Request message for [DataprocMetastore.ExportMetadata][google.cloud.metastore.v1alpha.DataprocMetastore.ExportMetadata]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1320,7 +1295,6 @@ def export_metadata( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a metastore.ExportMetadataRequest. # There's no risk of modifying the input as we've already verified @@ -1386,7 +1360,6 @@ def restore_service( This corresponds to the ``backup`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1418,10 +1391,8 @@ def restore_service( # there are no flattened fields. 
if not isinstance(request, metastore.RestoreServiceRequest): request = metastore.RestoreServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if service is not None: request.service = service if backup is not None: @@ -1475,7 +1446,6 @@ def list_backups( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1507,10 +1477,8 @@ def list_backups( # there are no flattened fields. if not isinstance(request, metastore.ListBackupsRequest): request = metastore.ListBackupsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1560,7 +1528,6 @@ def get_backup( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1587,10 +1554,8 @@ def get_backup( # there are no flattened fields. if not isinstance(request, metastore.GetBackupRequest): request = metastore.GetBackupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1657,7 +1622,6 @@ def create_backup( This corresponds to the ``backup_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1689,10 +1653,8 @@ def create_backup( # there are no flattened fields. if not isinstance(request, metastore.CreateBackupRequest): request = metastore.CreateBackupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if backup is not None: @@ -1748,7 +1710,6 @@ def delete_backup( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1790,10 +1751,8 @@ def delete_backup( # there are no flattened fields. if not isinstance(request, metastore.DeleteBackupRequest): request = metastore.DeleteBackupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1814,7 +1773,7 @@ def delete_backup( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=metastore.OperationMetadata, ) diff --git a/google/cloud/metastore_v1alpha/services/dataproc_metastore/pagers.py b/google/cloud/metastore_v1alpha/services/dataproc_metastore/pagers.py index e9084e7..459b78f 100644 --- a/google/cloud/metastore_v1alpha/services/dataproc_metastore/pagers.py +++ b/google/cloud/metastore_v1alpha/services/dataproc_metastore/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -373,7 +371,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/__init__.py b/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/__init__.py index fa2687a..2f902e5 100644 --- a/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/__init__.py +++ b/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/base.py b/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/base.py index 12f280e..b74f098 100644 --- a/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/base.py +++ b/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.metastore_v1alpha.types import metastore -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -37,27 +36,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class DataprocMetastoreTransport(abc.ABC): """Abstract transport class for DataprocMetastore.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "metastore.googleapis.com" + def __init__( self, *, - host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -66,7 +79,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,29 +93,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -167,11 +227,10 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_services( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.ListServicesRequest], - typing.Union[ - metastore.ListServicesResponse, - typing.Awaitable[metastore.ListServicesResponse], + Union[ + metastore.ListServicesResponse, Awaitable[metastore.ListServicesResponse] ], ]: raise NotImplementedError() @@ -179,47 +238,47 @@ def list_services( @property def get_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.GetServiceRequest], - typing.Union[metastore.Service, typing.Awaitable[metastore.Service]], + Union[metastore.Service, Awaitable[metastore.Service]], ]: raise NotImplementedError() @property def create_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.CreateServiceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.UpdateServiceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.DeleteServiceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def list_metadata_imports( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.ListMetadataImportsRequest], - typing.Union[ + Union[ metastore.ListMetadataImportsResponse, - typing.Awaitable[metastore.ListMetadataImportsResponse], + Awaitable[metastore.ListMetadataImportsResponse], ], ]: raise NotImplementedError() @@ -227,86 +286,81 @@ def list_metadata_imports( @property def get_metadata_import( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.GetMetadataImportRequest], - typing.Union[ - metastore.MetadataImport, typing.Awaitable[metastore.MetadataImport] - ], + Union[metastore.MetadataImport, Awaitable[metastore.MetadataImport]], ]: raise NotImplementedError() @property def create_metadata_import( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.CreateMetadataImportRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_metadata_import( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.UpdateMetadataImportRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def export_metadata( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.ExportMetadataRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def restore_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.RestoreServiceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def list_backups( 
self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.ListBackupsRequest], - typing.Union[ - metastore.ListBackupsResponse, - typing.Awaitable[metastore.ListBackupsResponse], - ], + Union[metastore.ListBackupsResponse, Awaitable[metastore.ListBackupsResponse]], ]: raise NotImplementedError() @property def get_backup( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.GetBackupRequest], - typing.Union[metastore.Backup, typing.Awaitable[metastore.Backup]], + Union[metastore.Backup, Awaitable[metastore.Backup]], ]: raise NotImplementedError() @property def create_backup( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.CreateBackupRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_backup( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.DeleteBackupRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/grpc.py b/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/grpc.py index b40424e..75a7006 100644 --- a/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/grpc.py +++ b/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.metastore_v1alpha.types import metastore -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO @@ -74,7 +71,7 @@ def __init__( self, *, host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -88,7 +85,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -199,7 +197,7 @@ def __init__( def create_channel( cls, host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -230,13 +228,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -313,7 +313,7 @@ def get_service(self) -> Callable[[metastore.GetServiceRequest], metastore.Servi @property def create_service( self, - ) -> Callable[[metastore.CreateServiceRequest], operations.Operation]: + ) -> Callable[[metastore.CreateServiceRequest], operations_pb2.Operation]: r"""Return a callable for the create service method over gRPC. Creates a metastore service in a project and @@ -333,14 +333,14 @@ def create_service( self._stubs["create_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/CreateService", request_serializer=metastore.CreateServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_service"] @property def update_service( self, - ) -> Callable[[metastore.UpdateServiceRequest], operations.Operation]: + ) -> Callable[[metastore.UpdateServiceRequest], operations_pb2.Operation]: r"""Return a callable for the update service method over gRPC. Updates the parameters of a single service. @@ -359,14 +359,14 @@ def update_service( self._stubs["update_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/UpdateService", request_serializer=metastore.UpdateServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_service"] @property def delete_service( self, - ) -> Callable[[metastore.DeleteServiceRequest], operations.Operation]: + ) -> Callable[[metastore.DeleteServiceRequest], operations_pb2.Operation]: r"""Return a callable for the delete service method over gRPC. Deletes a single service. @@ -385,7 +385,7 @@ def delete_service( self._stubs["delete_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/DeleteService", request_serializer=metastore.DeleteServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_service"] @@ -446,7 +446,7 @@ def get_metadata_import( @property def create_metadata_import( self, - ) -> Callable[[metastore.CreateMetadataImportRequest], operations.Operation]: + ) -> Callable[[metastore.CreateMetadataImportRequest], operations_pb2.Operation]: r"""Return a callable for the create metadata import method over gRPC. 
Creates a new MetadataImport in a given project and @@ -466,14 +466,14 @@ def create_metadata_import( self._stubs["create_metadata_import"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/CreateMetadataImport", request_serializer=metastore.CreateMetadataImportRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_metadata_import"] @property def update_metadata_import( self, - ) -> Callable[[metastore.UpdateMetadataImportRequest], operations.Operation]: + ) -> Callable[[metastore.UpdateMetadataImportRequest], operations_pb2.Operation]: r"""Return a callable for the update metadata import method over gRPC. Updates a single import. @@ -494,14 +494,14 @@ def update_metadata_import( self._stubs["update_metadata_import"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/UpdateMetadataImport", request_serializer=metastore.UpdateMetadataImportRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_metadata_import"] @property def export_metadata( self, - ) -> Callable[[metastore.ExportMetadataRequest], operations.Operation]: + ) -> Callable[[metastore.ExportMetadataRequest], operations_pb2.Operation]: r"""Return a callable for the export metadata method over gRPC. Exports metadata from a service. @@ -520,14 +520,14 @@ def export_metadata( self._stubs["export_metadata"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/ExportMetadata", request_serializer=metastore.ExportMetadataRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["export_metadata"] @property def restore_service( self, - ) -> Callable[[metastore.RestoreServiceRequest], operations.Operation]: + ) -> Callable[[metastore.RestoreServiceRequest], operations_pb2.Operation]: r"""Return a callable for the restore service method over gRPC. Restores a service from a backup. @@ -546,7 +546,7 @@ def restore_service( self._stubs["restore_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/RestoreService", request_serializer=metastore.RestoreServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["restore_service"] @@ -603,7 +603,7 @@ def get_backup(self) -> Callable[[metastore.GetBackupRequest], metastore.Backup] @property def create_backup( self, - ) -> Callable[[metastore.CreateBackupRequest], operations.Operation]: + ) -> Callable[[metastore.CreateBackupRequest], operations_pb2.Operation]: r"""Return a callable for the create backup method over gRPC. Creates a new Backup in a given project and location. 
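Aside: every stub hunk above follows the transport's lazy-initialization pattern — the first access to a property builds a `grpc_channel.unary_unary` stub with the request serializer and the (now `operations_pb2`-qualified) response deserializer, caches it in `self._stubs`, and returns the cached callable thereafter. A minimal sketch of that pattern against this module; the anonymous credentials and placeholder request are illustrative assumptions, not part of the patch::

    from google.auth.credentials import AnonymousCredentials
    from google.cloud.metastore_v1alpha.services.dataproc_metastore.transports.grpc import (
        DataprocMetastoreGrpcTransport,
    )

    # Anonymous credentials keep the sketch runnable without ADC; a real
    # call would need proper credentials and a live service.
    transport = DataprocMetastoreGrpcTransport(credentials=AnonymousCredentials())

    # First attribute access builds the stub and caches it under
    # self._stubs["create_backup"]; later accesses return the same callable.
    stub = transport.create_backup
    assert stub is transport.create_backup

    # Invoking the stub would send a metastore.CreateBackupRequest and decode
    # the reply with operations_pb2.Operation.FromString (live service only):
    # op = stub(metastore.CreateBackupRequest(parent="projects/p/locations/l/services/s"))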
@@ -622,14 +622,14 @@ def create_backup( self._stubs["create_backup"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/CreateBackup", request_serializer=metastore.CreateBackupRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_backup"] @property def delete_backup( self, - ) -> Callable[[metastore.DeleteBackupRequest], operations.Operation]: + ) -> Callable[[metastore.DeleteBackupRequest], operations_pb2.Operation]: r"""Return a callable for the delete backup method over gRPC. Deletes a single backup. @@ -648,7 +648,7 @@ def delete_backup( self._stubs["delete_backup"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/DeleteBackup", request_serializer=metastore.DeleteBackupRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_backup"] diff --git a/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/grpc_asyncio.py b/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/grpc_asyncio.py index 61b9514..cd5289f 100644 --- a/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/grpc_asyncio.py +++ b/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.metastore_v1alpha.types import metastore -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO from .grpc import DataprocMetastoreGrpcTransport @@ -77,7 +74,7 @@ class DataprocMetastoreGrpcAsyncIOTransport(DataprocMetastoreTransport): def create_channel( cls, host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -104,13 +101,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -118,7 +117,7 @@ def __init__( self, *, host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -132,7 +131,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -191,7 +191,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -323,7 +322,9 @@ def get_service( @property def create_service( self, - ) -> Callable[[metastore.CreateServiceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.CreateServiceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create service method over gRPC. Creates a metastore service in a project and @@ -343,14 +344,16 @@ def create_service( self._stubs["create_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/CreateService", request_serializer=metastore.CreateServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_service"] @property def update_service( self, - ) -> Callable[[metastore.UpdateServiceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.UpdateServiceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the update service method over gRPC. Updates the parameters of a single service. @@ -369,14 +372,16 @@ def update_service( self._stubs["update_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/UpdateService", request_serializer=metastore.UpdateServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_service"] @property def delete_service( self, - ) -> Callable[[metastore.DeleteServiceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.DeleteServiceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete service method over gRPC. Deletes a single service. 
@@ -395,7 +400,7 @@ def delete_service( self._stubs["delete_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/DeleteService", request_serializer=metastore.DeleteServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_service"] @@ -460,7 +465,7 @@ def get_metadata_import( def create_metadata_import( self, ) -> Callable[ - [metastore.CreateMetadataImportRequest], Awaitable[operations.Operation] + [metastore.CreateMetadataImportRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the create metadata import method over gRPC. @@ -481,7 +486,7 @@ def create_metadata_import( self._stubs["create_metadata_import"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/CreateMetadataImport", request_serializer=metastore.CreateMetadataImportRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_metadata_import"] @@ -489,7 +494,7 @@ def create_metadata_import( def update_metadata_import( self, ) -> Callable[ - [metastore.UpdateMetadataImportRequest], Awaitable[operations.Operation] + [metastore.UpdateMetadataImportRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update metadata import method over gRPC. @@ -511,14 +516,16 @@ def update_metadata_import( self._stubs["update_metadata_import"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/UpdateMetadataImport", request_serializer=metastore.UpdateMetadataImportRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_metadata_import"] @property def export_metadata( self, - ) -> Callable[[metastore.ExportMetadataRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.ExportMetadataRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the export metadata method over gRPC. Exports metadata from a service. @@ -537,14 +544,16 @@ def export_metadata( self._stubs["export_metadata"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/ExportMetadata", request_serializer=metastore.ExportMetadataRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["export_metadata"] @property def restore_service( self, - ) -> Callable[[metastore.RestoreServiceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.RestoreServiceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the restore service method over gRPC. Restores a service from a backup. 
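Aside: the stubs in these hunks return raw `operations_pb2.Operation` messages; it is the client layer (via the `operation.from_gapic(...)` calls earlier in this patch) that wraps them into polling futures typed with `metastore.OperationMetadata` and, for deletes, `empty_pb2.Empty`. A usage sketch of the wrapped result — resource names are placeholders, and the call itself needs real credentials and an existing service::

    from google.api_core import operation
    from google.cloud import metastore_v1alpha

    client = metastore_v1alpha.DataprocMetastoreClient()
    lro = client.delete_service(
        name="projects/my-project/locations/us-central1/services/my-service"
    )

    # from_gapic has already wrapped the raw operations_pb2.Operation.
    assert isinstance(lro, operation.Operation)
    lro.result()         # blocks until done; DeleteService resolves to empty_pb2.Empty
    print(lro.metadata)  # metastore_v1alpha.OperationMetadata for this operation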
@@ -563,7 +572,7 @@ def restore_service( self._stubs["restore_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/RestoreService", request_serializer=metastore.RestoreServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["restore_service"] @@ -624,7 +633,7 @@ def get_backup( @property def create_backup( self, - ) -> Callable[[metastore.CreateBackupRequest], Awaitable[operations.Operation]]: + ) -> Callable[[metastore.CreateBackupRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create backup method over gRPC. Creates a new Backup in a given project and location. @@ -643,14 +652,14 @@ def create_backup( self._stubs["create_backup"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/CreateBackup", request_serializer=metastore.CreateBackupRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_backup"] @property def delete_backup( self, - ) -> Callable[[metastore.DeleteBackupRequest], Awaitable[operations.Operation]]: + ) -> Callable[[metastore.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete backup method over gRPC. Deletes a single backup. @@ -669,7 +678,7 @@ def delete_backup( self._stubs["delete_backup"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1alpha.DataprocMetastore/DeleteBackup", request_serializer=metastore.DeleteBackupRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_backup"] diff --git a/google/cloud/metastore_v1alpha/types/__init__.py b/google/cloud/metastore_v1alpha/types/__init__.py index 7914dc7..fa17cfe 100644 --- a/google/cloud/metastore_v1alpha/types/__init__.py +++ b/google/cloud/metastore_v1alpha/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .metastore import ( Backup, CreateBackupRequest, diff --git a/google/cloud/metastore_v1alpha/types/metastore.py b/google/cloud/metastore_v1alpha/types/metastore.py index 47c9825..851091e 100644 --- a/google/cloud/metastore_v1alpha/types/metastore.py +++ b/google/cloud/metastore_v1alpha/types/metastore.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore -from google.type import dayofweek_pb2 as dayofweek # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore __protobuf__ = proto.module( @@ -66,7 +63,6 @@ class Service(proto.Message): r"""A managed metastore service that serves metadata queries. - Attributes: hive_metastore_config (google.cloud.metastore_v1alpha.types.HiveMetastoreConfig): Configuration information specific to running @@ -164,43 +160,27 @@ class ReleaseChannel(proto.Enum): oneof="metastore_config", message="HiveMetastoreConfig", ) - - name = proto.Field(proto.STRING, number=1) - - create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=4) - - network = proto.Field(proto.STRING, number=7) - - endpoint_uri = proto.Field(proto.STRING, number=8) - - port = proto.Field(proto.INT32, number=9) - + name = proto.Field(proto.STRING, number=1,) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) + network = proto.Field(proto.STRING, number=7,) + endpoint_uri = proto.Field(proto.STRING, number=8,) + port = proto.Field(proto.INT32, number=9,) state = proto.Field(proto.ENUM, number=10, enum=State,) - - state_message = proto.Field(proto.STRING, number=11) - - artifact_gcs_uri = proto.Field(proto.STRING, number=12) - + state_message = proto.Field(proto.STRING, number=11,) + artifact_gcs_uri = proto.Field(proto.STRING, number=12,) tier = proto.Field(proto.ENUM, number=13, enum=Tier,) - metadata_integration = proto.Field( proto.MESSAGE, number=14, message="MetadataIntegration", ) - maintenance_window = proto.Field( proto.MESSAGE, number=15, message="MaintenanceWindow", ) - - uid = proto.Field(proto.STRING, number=16) - + uid = proto.Field(proto.STRING, number=16,) metadata_management_activity = proto.Field( proto.MESSAGE, number=17, message="MetadataManagementActivity", ) - release_channel = proto.Field(proto.ENUM, number=19, enum=ReleaseChannel,) @@ -231,7 +211,7 @@ class DataCatalogConfig(proto.Message): Catalog. """ - enabled = proto.Field(proto.BOOL, number=2) + enabled = proto.Field(proto.BOOL, number=2,) class MaintenanceWindow(proto.Message): @@ -246,9 +226,8 @@ class MaintenanceWindow(proto.Message): The day of week, when the window starts. """ - hour_of_day = proto.Field(proto.MESSAGE, number=1, message=wrappers.Int32Value,) - - day_of_week = proto.Field(proto.ENUM, number=2, enum=dayofweek.DayOfWeek,) + hour_of_day = proto.Field(proto.MESSAGE, number=1, message=wrappers_pb2.Int32Value,) + day_of_week = proto.Field(proto.ENUM, number=2, enum=dayofweek_pb2.DayOfWeek,) class HiveMetastoreConfig(proto.Message): @@ -272,16 +251,13 @@ class HiveMetastoreConfig(proto.Message): the request's ``service``. 
""" - version = proto.Field(proto.STRING, number=1) - - config_overrides = proto.MapField(proto.STRING, proto.STRING, number=2) - + version = proto.Field(proto.STRING, number=1,) + config_overrides = proto.MapField(proto.STRING, proto.STRING, number=2,) kerberos_config = proto.Field(proto.MESSAGE, number=3, message="KerberosConfig",) class KerberosConfig(proto.Message): r"""Configuration information for a Kerberos principal. - Attributes: keytab (google.cloud.metastore_v1alpha.types.Secret): A Kerberos keytab file that can be used to @@ -299,15 +275,12 @@ class KerberosConfig(proto.Message): """ keytab = proto.Field(proto.MESSAGE, number=1, message="Secret",) - - principal = proto.Field(proto.STRING, number=2) - - krb5_config_gcs_uri = proto.Field(proto.STRING, number=3) + principal = proto.Field(proto.STRING, number=2,) + krb5_config_gcs_uri = proto.Field(proto.STRING, number=3,) class Secret(proto.Message): r"""A securely stored value. - Attributes: cloud_secret (str): The relative resource name of a Secret Manager secret @@ -316,12 +289,11 @@ class Secret(proto.Message): ``projects/{project_number}/secrets/{secret_id}/versions/{version_id}``. """ - cloud_secret = proto.Field(proto.STRING, number=2, oneof="value") + cloud_secret = proto.Field(proto.STRING, number=2, oneof="value",) class MetadataManagementActivity(proto.Message): r"""The metadata management activities of the metastore service. - Attributes: metadata_exports (Sequence[google.cloud.metastore_v1alpha.types.MetadataExport]): Output only. The latest metadata exports of @@ -334,13 +306,11 @@ class MetadataManagementActivity(proto.Message): metadata_exports = proto.RepeatedField( proto.MESSAGE, number=1, message="MetadataExport", ) - restores = proto.RepeatedField(proto.MESSAGE, number=2, message="Restore",) class MetadataImport(proto.Message): r"""A metastore resource that imports metadata. - Attributes: database_dump (google.cloud.metastore_v1alpha.types.MetadataImport.DatabaseDump): Immutable. A database dump from a pre- @@ -397,31 +367,22 @@ class DatabaseType(proto.Enum): database_type = proto.Field( proto.ENUM, number=1, enum="MetadataImport.DatabaseDump.DatabaseType", ) - - gcs_uri = proto.Field(proto.STRING, number=2) - - source_database = proto.Field(proto.STRING, number=3) - + gcs_uri = proto.Field(proto.STRING, number=2,) + source_database = proto.Field(proto.STRING, number=3,) type_ = proto.Field(proto.ENUM, number=4, enum="DatabaseDumpSpec.Type",) database_dump = proto.Field( proto.MESSAGE, number=6, oneof="metadata", message=DatabaseDump, ) - - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=5, enum=State,) class MetadataExport(proto.Message): r"""The details of a metadata export operation. - Attributes: destination_gcs_uri (str): Output only. 
A Cloud Storage URI of a folder that metadata @@ -447,14 +408,10 @@ class State(proto.Enum): FAILED = 3 CANCELLED = 4 - destination_gcs_uri = proto.Field(proto.STRING, number=4, oneof="destination") - - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + destination_gcs_uri = proto.Field(proto.STRING, number=4, oneof="destination",) + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=3, enum=State,) - database_dump_type = proto.Field( proto.ENUM, number=5, enum="DatabaseDumpSpec.Type", ) @@ -462,7 +419,6 @@ class State(proto.Enum): class Backup(proto.Message): r"""The details of a backup resource. - Attributes: name (str): Immutable. The relative resource name of the backup, in the @@ -492,22 +448,16 @@ class State(proto.Enum): ACTIVE = 3 FAILED = 4 - name = proto.Field(proto.STRING, number=1) - - create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - + name = proto.Field(proto.STRING, number=1,) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=4, enum=State,) - service_revision = proto.Field(proto.MESSAGE, number=5, message="Service",) - - description = proto.Field(proto.STRING, number=6) + description = proto.Field(proto.STRING, number=6,) class Restore(proto.Message): r"""The details of a metadata restore operation. - Attributes: start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the restore @@ -544,17 +494,12 @@ class RestoreType(proto.Enum): FULL = 1 METADATA_ONLY = 2 - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=3, enum=State,) - - backup = proto.Field(proto.STRING, number=4) - + backup = proto.Field(proto.STRING, number=4,) type_ = proto.Field(proto.ENUM, number=5, enum=RestoreType,) - - details = proto.Field(proto.STRING, number=6) + details = proto.Field(proto.STRING, number=6,) class ListServicesRequest(proto.Message): @@ -594,15 +539,11 @@ class ListServicesRequest(proto.Message): order. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListServicesResponse(proto.Message): @@ -625,10 +566,8 @@ def raw_page(self): return self services = proto.RepeatedField(proto.MESSAGE, number=1, message="Service",) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetServiceRequest(proto.Message): @@ -643,7 +582,7 @@ class GetServiceRequest(proto.Message): ``projects/{project_number}/locations/{location_id}/services/{service_id}``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateServiceRequest(proto.Message): @@ -686,13 +625,10 @@ class CreateServiceRequest(proto.Message): supported. """ - parent = proto.Field(proto.STRING, number=1) - - service_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + service_id = proto.Field(proto.STRING, number=2,) service = proto.Field(proto.MESSAGE, number=3, message="Service",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class UpdateServiceRequest(proto.Message): @@ -731,11 +667,11 @@ class UpdateServiceRequest(proto.Message): supported. """ - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) service = proto.Field(proto.MESSAGE, number=2, message="Service",) - - request_id = proto.Field(proto.STRING, number=3) + request_id = proto.Field(proto.STRING, number=3,) class DeleteServiceRequest(proto.Message): @@ -766,9 +702,8 @@ class DeleteServiceRequest(proto.Message): supported. """ - name = proto.Field(proto.STRING, number=1) - - request_id = proto.Field(proto.STRING, number=2) + name = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=2,) class ListMetadataImportsRequest(proto.Message): @@ -808,15 +743,11 @@ class ListMetadataImportsRequest(proto.Message): order. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListMetadataImportsResponse(proto.Message): @@ -841,10 +772,8 @@ def raw_page(self): metadata_imports = proto.RepeatedField( proto.MESSAGE, number=1, message="MetadataImport", ) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetMetadataImportRequest(proto.Message): @@ -859,7 +788,7 @@ class GetMetadataImportRequest(proto.Message): ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports/{import_id}``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateMetadataImportRequest(proto.Message): @@ -902,13 +831,10 @@ class CreateMetadataImportRequest(proto.Message): supported. """ - parent = proto.Field(proto.STRING, number=1) - - metadata_import_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + metadata_import_id = proto.Field(proto.STRING, number=2,) metadata_import = proto.Field(proto.MESSAGE, number=3, message="MetadataImport",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class UpdateMetadataImportRequest(proto.Message): @@ -947,11 +873,11 @@ class UpdateMetadataImportRequest(proto.Message): supported. """ - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) metadata_import = proto.Field(proto.MESSAGE, number=2, message="MetadataImport",) - - request_id = proto.Field(proto.STRING, number=3) + request_id = proto.Field(proto.STRING, number=3,) class ListBackupsRequest(proto.Message): @@ -991,15 +917,11 @@ class ListBackupsRequest(proto.Message): order. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListBackupsResponse(proto.Message): @@ -1022,10 +944,8 @@ def raw_page(self): return self backups = proto.RepeatedField(proto.MESSAGE, number=1, message="Backup",) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetBackupRequest(proto.Message): @@ -1040,7 +960,7 @@ class GetBackupRequest(proto.Message): ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateBackupRequest(proto.Message): @@ -1083,13 +1003,10 @@ class CreateBackupRequest(proto.Message): supported. """ - parent = proto.Field(proto.STRING, number=1) - - backup_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + backup_id = proto.Field(proto.STRING, number=2,) backup = proto.Field(proto.MESSAGE, number=3, message="Backup",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class DeleteBackupRequest(proto.Message): @@ -1120,9 +1037,8 @@ class DeleteBackupRequest(proto.Message): supported. """ - name = proto.Field(proto.STRING, number=1) - - request_id = proto.Field(proto.STRING, number=2) + name = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=2,) class ExportMetadataRequest(proto.Message): @@ -1161,12 +1077,9 @@ class ExportMetadataRequest(proto.Message): defaults to ``MYSQL``. """ - destination_gcs_folder = proto.Field(proto.STRING, number=2, oneof="destination") - - service = proto.Field(proto.STRING, number=1) - - request_id = proto.Field(proto.STRING, number=3) - + destination_gcs_folder = proto.Field(proto.STRING, number=2, oneof="destination",) + service = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=3,) database_dump_type = proto.Field( proto.ENUM, number=4, enum="DatabaseDumpSpec.Type", ) @@ -1174,7 +1087,6 @@ class ExportMetadataRequest(proto.Message): class RestoreServiceRequest(proto.Message): r"""Request message for [DataprocMetastore.Restore][]. - Attributes: service (str): Required. The relative resource name of the metastore @@ -1207,18 +1119,14 @@ class RestoreServiceRequest(proto.Message): supported. """ - service = proto.Field(proto.STRING, number=1) - - backup = proto.Field(proto.STRING, number=2) - + service = proto.Field(proto.STRING, number=1,) + backup = proto.Field(proto.STRING, number=2,) restore_type = proto.Field(proto.ENUM, number=3, enum="Restore.RestoreType",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. - Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the operation was @@ -1246,24 +1154,17 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - target = proto.Field(proto.STRING, number=3) - - verb = proto.Field(proto.STRING, number=4) - - status_message = proto.Field(proto.STRING, number=5) - - requested_cancellation = proto.Field(proto.BOOL, number=6) - - api_version = proto.Field(proto.STRING, number=7) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_message = proto.Field(proto.STRING, number=5,) + requested_cancellation = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) class LocationMetadata(proto.Message): r"""Metadata about the service in a location. 
- Attributes: supported_hive_metastore_versions (Sequence[google.cloud.metastore_v1alpha.types.LocationMetadata.HiveMetastoreVersion]): The versions of Hive Metastore that can be used when @@ -1286,9 +1187,8 @@ class HiveMetastoreVersion(proto.Message): that omits the ``version``. """ - version = proto.Field(proto.STRING, number=1) - - is_default = proto.Field(proto.BOOL, number=2) + version = proto.Field(proto.STRING, number=1,) + is_default = proto.Field(proto.BOOL, number=2,) supported_hive_metastore_versions = proto.RepeatedField( proto.MESSAGE, number=1, message=HiveMetastoreVersion, @@ -1298,7 +1198,7 @@ class HiveMetastoreVersion(proto.Message): class DatabaseDumpSpec(proto.Message): r"""The specification of database dump to import from or export to. - """ + """ class Type(proto.Enum): r"""The type of the database dump.""" diff --git a/google/cloud/metastore_v1beta/__init__.py b/google/cloud/metastore_v1beta/__init__.py index a175a8e..ba27b5e 100644 --- a/google/cloud/metastore_v1beta/__init__.py +++ b/google/cloud/metastore_v1beta/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,12 +15,14 @@ # from .services.dataproc_metastore import DataprocMetastoreClient +from .services.dataproc_metastore import DataprocMetastoreAsyncClient + from .types.metastore import Backup from .types.metastore import CreateBackupRequest from .types.metastore import CreateMetadataImportRequest from .types.metastore import CreateServiceRequest -from .types.metastore import DataCatalogConfig from .types.metastore import DatabaseDumpSpec +from .types.metastore import DataCatalogConfig from .types.metastore import DeleteBackupRequest from .types.metastore import DeleteServiceRequest from .types.metastore import ExportMetadataRequest @@ -50,14 +51,15 @@ from .types.metastore import UpdateMetadataImportRequest from .types.metastore import UpdateServiceRequest - __all__ = ( + "DataprocMetastoreAsyncClient", "Backup", "CreateBackupRequest", "CreateMetadataImportRequest", "CreateServiceRequest", "DataCatalogConfig", "DatabaseDumpSpec", + "DataprocMetastoreClient", "DeleteBackupRequest", "DeleteServiceRequest", "ExportMetadataRequest", @@ -85,5 +87,4 @@ "Service", "UpdateMetadataImportRequest", "UpdateServiceRequest", - "DataprocMetastoreClient", ) diff --git a/google/cloud/metastore_v1beta/gapic_metadata.json b/google/cloud/metastore_v1beta/gapic_metadata.json new file mode 100644 index 0000000..eca2df9 --- /dev/null +++ b/google/cloud/metastore_v1beta/gapic_metadata.json @@ -0,0 +1,173 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.metastore_v1beta", + "protoPackage": "google.cloud.metastore.v1beta", + "schema": "1.0", + "services": { + "DataprocMetastore": { + "clients": { + "grpc": { + "libraryClient": "DataprocMetastoreClient", + "rpcs": { + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateMetadataImport": { + "methods": [ + "create_metadata_import" + ] + }, + "CreateService": { + "methods": [ + "create_service" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "ExportMetadata": { + "methods": [ + "export_metadata" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetMetadataImport": { + "methods": [ + "get_metadata_import" + ] + }, + "GetService": { + "methods": [ + 
"get_service" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListMetadataImports": { + "methods": [ + "list_metadata_imports" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "RestoreService": { + "methods": [ + "restore_service" + ] + }, + "UpdateMetadataImport": { + "methods": [ + "update_metadata_import" + ] + }, + "UpdateService": { + "methods": [ + "update_service" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataprocMetastoreAsyncClient", + "rpcs": { + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateMetadataImport": { + "methods": [ + "create_metadata_import" + ] + }, + "CreateService": { + "methods": [ + "create_service" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteService": { + "methods": [ + "delete_service" + ] + }, + "ExportMetadata": { + "methods": [ + "export_metadata" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetMetadataImport": { + "methods": [ + "get_metadata_import" + ] + }, + "GetService": { + "methods": [ + "get_service" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListMetadataImports": { + "methods": [ + "list_metadata_imports" + ] + }, + "ListServices": { + "methods": [ + "list_services" + ] + }, + "RestoreService": { + "methods": [ + "restore_service" + ] + }, + "UpdateMetadataImport": { + "methods": [ + "update_metadata_import" + ] + }, + "UpdateService": { + "methods": [ + "update_service" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/metastore_v1beta/services/__init__.py b/google/cloud/metastore_v1beta/services/__init__.py index 42ffdf2..4de6597 100644 --- a/google/cloud/metastore_v1beta/services/__init__.py +++ b/google/cloud/metastore_v1beta/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/metastore_v1beta/services/dataproc_metastore/__init__.py b/google/cloud/metastore_v1beta/services/dataproc_metastore/__init__.py index 6f26cf1..194ae14 100644 --- a/google/cloud/metastore_v1beta/services/dataproc_metastore/__init__.py +++ b/google/cloud/metastore_v1beta/services/dataproc_metastore/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import DataprocMetastoreClient from .async_client import DataprocMetastoreAsyncClient diff --git a/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py b/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py index 07a3bed..6e54458 100644 --- a/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py +++ b/google/cloud/metastore_v1beta/services/dataproc_metastore/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,20 +20,19 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.metastore_v1beta.services.dataproc_metastore import pagers from google.cloud.metastore_v1beta.types import metastore -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DataprocMetastoreGrpcAsyncIOTransport from .client import DataprocMetastoreClient @@ -82,31 +79,26 @@ class DataprocMetastoreAsyncClient: parse_network_path = staticmethod(DataprocMetastoreClient.parse_network_path) service_path = staticmethod(DataprocMetastoreClient.service_path) parse_service_path = staticmethod(DataprocMetastoreClient.parse_service_path) - common_billing_account_path = staticmethod( DataprocMetastoreClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( DataprocMetastoreClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(DataprocMetastoreClient.common_folder_path) parse_common_folder_path = staticmethod( DataprocMetastoreClient.parse_common_folder_path ) - common_organization_path = staticmethod( DataprocMetastoreClient.common_organization_path ) parse_common_organization_path = staticmethod( DataprocMetastoreClient.parse_common_organization_path ) - common_project_path = staticmethod(DataprocMetastoreClient.common_project_path) parse_common_project_path = staticmethod( DataprocMetastoreClient.parse_common_project_path ) - common_location_path = staticmethod(DataprocMetastoreClient.common_location_path) parse_common_location_path = staticmethod( DataprocMetastoreClient.parse_common_location_path @@ -114,7 +106,8 @@ class DataprocMetastoreAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -129,7 +122,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -146,7 +139,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataprocMetastoreTransport: - """Return the transport used by the client instance. 
+ """Returns the transport used by the client instance. Returns: DataprocMetastoreTransport: The transport used by the client instance. @@ -160,12 +153,12 @@ def transport(self) -> DataprocMetastoreTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, DataprocMetastoreTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the dataproc metastore client. + """Instantiates the dataproc metastore client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -197,7 +190,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = DataprocMetastoreClient( credentials=credentials, transport=transport, @@ -229,7 +221,6 @@ async def list_services( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -259,7 +250,6 @@ async def list_services( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -313,7 +303,6 @@ async def get_service( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -340,7 +329,6 @@ async def get_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -415,7 +403,6 @@ async def create_service( This corresponds to the ``service_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -445,7 +432,6 @@ async def create_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if service is not None: @@ -486,7 +472,7 @@ async def update_service( request: metastore.UpdateServiceRequest = None, *, service: metastore.Service = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -518,7 +504,6 @@ async def update_service( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -548,7 +533,6 @@ async def update_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if service is not None: request.service = service if update_mask is not None: @@ -608,7 +592,6 @@ async def delete_service( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -648,7 +631,6 @@ async def delete_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -673,7 +655,7 @@ async def delete_service( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=metastore.OperationMetadata, ) @@ -704,7 +686,6 @@ async def list_metadata_imports( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -734,7 +715,6 @@ async def list_metadata_imports( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -788,7 +768,6 @@ async def get_metadata_import( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -815,7 +794,6 @@ async def get_metadata_import( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -889,7 +867,6 @@ async def create_metadata_import( This corresponds to the ``metadata_import_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -919,7 +896,6 @@ async def create_metadata_import( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metadata_import is not None: @@ -960,7 +936,7 @@ async def update_metadata_import( request: metastore.UpdateMetadataImportRequest = None, *, metadata_import: metastore.MetadataImport = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -994,7 +970,6 @@ async def update_metadata_import( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1024,7 +999,6 @@ async def update_metadata_import( # If we have keyword arguments corresponding to fields on the # request, apply these. - if metadata_import is not None: request.metadata_import = metadata_import if update_mask is not None: @@ -1074,7 +1048,6 @@ async def export_metadata( request (:class:`google.cloud.metastore_v1beta.types.ExportMetadataRequest`): The request object. Request message for [DataprocMetastore.ExportMetadata][google.cloud.metastore.v1beta.DataprocMetastore.ExportMetadata]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1091,7 +1064,6 @@ async def export_metadata( """ # Create or coerce a protobuf request object. 
- request = metastore.ExportMetadataRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1156,7 +1128,6 @@ async def restore_service( This corresponds to the ``backup`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1186,7 +1157,6 @@ async def restore_service( # If we have keyword arguments corresponding to fields on the # request, apply these. - if service is not None: request.service = service if backup is not None: @@ -1244,7 +1214,6 @@ async def list_backups( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1274,7 +1243,6 @@ async def list_backups( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1328,7 +1296,6 @@ async def get_backup( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1353,7 +1320,6 @@ async def get_backup( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1424,7 +1390,6 @@ async def create_backup( This corresponds to the ``backup_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1454,7 +1419,6 @@ async def create_backup( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if backup is not None: @@ -1514,7 +1478,6 @@ async def delete_backup( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1554,7 +1517,6 @@ async def delete_backup( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1579,7 +1541,7 @@ async def delete_backup( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=metastore.OperationMetadata, ) diff --git a/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py b/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py index 7e429a2..15f3403 100644 --- a/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py +++ b/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,10 +34,9 @@ from google.api_core import operation_async # type: ignore from google.cloud.metastore_v1beta.services.dataproc_metastore import pagers from google.cloud.metastore_v1beta.types import metastore -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DataprocMetastoreGrpcTransport from .transports.grpc_asyncio import DataprocMetastoreGrpcAsyncIOTransport @@ -62,7 +59,7 @@ class DataprocMetastoreClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[DataprocMetastoreTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -108,7 +105,8 @@ class DataprocMetastoreClient(metaclass=DataprocMetastoreClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -142,7 +140,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -159,7 +158,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -178,23 +177,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataprocMetastoreTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - DataprocMetastoreTransport: The transport used by the client instance. + DataprocMetastoreTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def backup_path(project: str, location: str, service: str, backup: str,) -> str: - """Return a fully-qualified backup string.""" + """Returns a fully-qualified backup string.""" return "projects/{project}/locations/{location}/services/{service}/backups/{backup}".format( project=project, location=location, service=service, backup=backup, ) @staticmethod def parse_backup_path(path: str) -> Dict[str, str]: - """Parse a backup path into its component segments.""" + """Parses a backup path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)/backups/(?P.+?)$", path, @@ -205,7 +205,7 @@ def parse_backup_path(path: str) -> Dict[str, str]: def metadata_import_path( project: str, location: str, service: str, metadata_import: str, ) -> str: - """Return a fully-qualified metadata_import string.""" + """Returns a fully-qualified metadata_import string.""" return "projects/{project}/locations/{location}/services/{service}/metadataImports/{metadata_import}".format( project=project, location=location, @@ -215,7 +215,7 @@ def metadata_import_path( @staticmethod def parse_metadata_import_path(path: str) -> Dict[str, str]: - """Parse a metadata_import path into its component segments.""" + """Parses a metadata_import path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)/metadataImports/(?P.+?)$", path, @@ -224,14 +224,14 @@ def parse_metadata_import_path(path: str) -> Dict[str, str]: @staticmethod def network_path(project: str, network: str,) -> str: - """Return a fully-qualified network string.""" + """Returns a fully-qualified network string.""" return "projects/{project}/global/networks/{network}".format( project=project, network=network, ) @staticmethod def parse_network_path(path: str) -> Dict[str, str]: - """Parse a network path into its component segments.""" + """Parses a network path into its component segments.""" m = re.match( r"^projects/(?P.+?)/global/networks/(?P.+?)$", path ) @@ -239,14 +239,14 @@ def parse_network_path(path: str) -> Dict[str, str]: @staticmethod def service_path(project: str, location: str, service: str,) -> str: - """Return a fully-qualified service string.""" + """Returns a fully-qualified service string.""" return "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, service=service, ) @staticmethod def parse_service_path(path: str) -> Dict[str, str]: - """Parse a service path into its component segments.""" + """Parses a service path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", path, @@ -255,7 +255,7 @@ def parse_service_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -268,7 +268,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -279,7 +279,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization 
string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -290,7 +290,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -301,7 +301,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -315,12 +315,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DataprocMetastoreTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the dataproc metastore client. + """Instantiates the dataproc metastore client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -375,9 +375,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -389,12 +390,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -409,8 +412,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -449,7 +452,6 @@ def list_services( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -481,10 +483,8 @@ def list_services( # there are no flattened fields. if not isinstance(request, metastore.ListServicesRequest): request = metastore.ListServicesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -534,7 +534,6 @@ def get_service( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -563,10 +562,8 @@ def get_service( # there are no flattened fields. if not isinstance(request, metastore.GetServiceRequest): request = metastore.GetServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -637,7 +634,6 @@ def create_service( This corresponds to the ``service_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -669,10 +665,8 @@ def create_service( # there are no flattened fields. if not isinstance(request, metastore.CreateServiceRequest): request = metastore.CreateServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if service is not None: @@ -709,7 +703,7 @@ def update_service( request: metastore.UpdateServiceRequest = None, *, service: metastore.Service = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -741,7 +735,6 @@ def update_service( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -773,10 +766,8 @@ def update_service( # there are no flattened fields. if not isinstance(request, metastore.UpdateServiceRequest): request = metastore.UpdateServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if service is not None: request.service = service if update_mask is not None: @@ -832,7 +823,6 @@ def delete_service( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -874,10 +864,8 @@ def delete_service( # there are no flattened fields. if not isinstance(request, metastore.DeleteServiceRequest): request = metastore.DeleteServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -898,7 +886,7 @@ def delete_service( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=metastore.OperationMetadata, ) @@ -929,7 +917,6 @@ def list_metadata_imports( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -961,10 +948,8 @@ def list_metadata_imports( # there are no flattened fields. if not isinstance(request, metastore.ListMetadataImportsRequest): request = metastore.ListMetadataImportsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -1014,7 +999,6 @@ def get_metadata_import( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1043,10 +1027,8 @@ def get_metadata_import( # there are no flattened fields. if not isinstance(request, metastore.GetMetadataImportRequest): request = metastore.GetMetadataImportRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1116,7 +1098,6 @@ def create_metadata_import( This corresponds to the ``metadata_import_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1148,10 +1129,8 @@ def create_metadata_import( # there are no flattened fields. if not isinstance(request, metastore.CreateMetadataImportRequest): request = metastore.CreateMetadataImportRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metadata_import is not None: @@ -1188,7 +1167,7 @@ def update_metadata_import( request: metastore.UpdateMetadataImportRequest = None, *, metadata_import: metastore.MetadataImport = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1222,7 +1201,6 @@ def update_metadata_import( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1254,10 +1232,8 @@ def update_metadata_import( # there are no flattened fields. if not isinstance(request, metastore.UpdateMetadataImportRequest): request = metastore.UpdateMetadataImportRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if metadata_import is not None: request.metadata_import = metadata_import if update_mask is not None: @@ -1303,7 +1279,6 @@ def export_metadata( request (google.cloud.metastore_v1beta.types.ExportMetadataRequest): The request object. Request message for [DataprocMetastore.ExportMetadata][google.cloud.metastore.v1beta.DataprocMetastore.ExportMetadata]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1320,7 +1295,6 @@ def export_metadata( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a metastore.ExportMetadataRequest. # There's no risk of modifying the input as we've already verified @@ -1386,7 +1360,6 @@ def restore_service( This corresponds to the ``backup`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1418,10 +1391,8 @@ def restore_service( # there are no flattened fields. 
if not isinstance(request, metastore.RestoreServiceRequest): request = metastore.RestoreServiceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if service is not None: request.service = service if backup is not None: @@ -1475,7 +1446,6 @@ def list_backups( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1507,10 +1477,8 @@ def list_backups( # there are no flattened fields. if not isinstance(request, metastore.ListBackupsRequest): request = metastore.ListBackupsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1560,7 +1528,6 @@ def get_backup( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1587,10 +1554,8 @@ def get_backup( # there are no flattened fields. if not isinstance(request, metastore.GetBackupRequest): request = metastore.GetBackupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1657,7 +1622,6 @@ def create_backup( This corresponds to the ``backup_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1689,10 +1653,8 @@ def create_backup( # there are no flattened fields. if not isinstance(request, metastore.CreateBackupRequest): request = metastore.CreateBackupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if backup is not None: @@ -1748,7 +1710,6 @@ def delete_backup( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1790,10 +1751,8 @@ def delete_backup( # there are no flattened fields. if not isinstance(request, metastore.DeleteBackupRequest): request = metastore.DeleteBackupRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1814,7 +1773,7 @@ def delete_backup( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=metastore.OperationMetadata, ) diff --git a/google/cloud/metastore_v1beta/services/dataproc_metastore/pagers.py b/google/cloud/metastore_v1beta/services/dataproc_metastore/pagers.py index 6bef300..996dceb 100644 --- a/google/cloud/metastore_v1beta/services/dataproc_metastore/pagers.py +++ b/google/cloud/metastore_v1beta/services/dataproc_metastore/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -373,7 +371,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/__init__.py b/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/__init__.py index fa2687a..2f902e5 100644 --- a/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/__init__.py +++ b/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/base.py b/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/base.py index 99c274c..a85a8d0 100644 --- a/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/base.py +++ b/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.metastore_v1beta.types import metastore -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -39,27 +38,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class DataprocMetastoreTransport(abc.ABC): """Abstract transport class for DataprocMetastore.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "metastore.googleapis.com" + def __init__( self, *, - host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -68,7 +81,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -82,29 +95,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -169,11 +229,10 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def list_services( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.ListServicesRequest], - typing.Union[ - metastore.ListServicesResponse, - typing.Awaitable[metastore.ListServicesResponse], + Union[ + metastore.ListServicesResponse, Awaitable[metastore.ListServicesResponse] ], ]: raise NotImplementedError() @@ -181,47 +240,47 @@ def list_services( @property def get_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.GetServiceRequest], - typing.Union[metastore.Service, typing.Awaitable[metastore.Service]], + Union[metastore.Service, Awaitable[metastore.Service]], ]: raise NotImplementedError() @property def create_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.CreateServiceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.UpdateServiceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.DeleteServiceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def list_metadata_imports( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.ListMetadataImportsRequest], - typing.Union[ + Union[ metastore.ListMetadataImportsResponse, - typing.Awaitable[metastore.ListMetadataImportsResponse], + Awaitable[metastore.ListMetadataImportsResponse], ], ]: raise NotImplementedError() @@ -229,86 +288,81 @@ def list_metadata_imports( @property def get_metadata_import( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.GetMetadataImportRequest], - typing.Union[ - metastore.MetadataImport, typing.Awaitable[metastore.MetadataImport] - ], + Union[metastore.MetadataImport, Awaitable[metastore.MetadataImport]], ]: raise NotImplementedError() @property def create_metadata_import( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.CreateMetadataImportRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_metadata_import( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.UpdateMetadataImportRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def export_metadata( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.ExportMetadataRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def restore_service( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.RestoreServiceRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def list_backups( 
self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.ListBackupsRequest], - typing.Union[ - metastore.ListBackupsResponse, - typing.Awaitable[metastore.ListBackupsResponse], - ], + Union[metastore.ListBackupsResponse, Awaitable[metastore.ListBackupsResponse]], ]: raise NotImplementedError() @property def get_backup( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.GetBackupRequest], - typing.Union[metastore.Backup, typing.Awaitable[metastore.Backup]], + Union[metastore.Backup, Awaitable[metastore.Backup]], ]: raise NotImplementedError() @property def create_backup( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.CreateBackupRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_backup( self, - ) -> typing.Callable[ + ) -> Callable[ [metastore.DeleteBackupRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/grpc.py b/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/grpc.py index 026bb61..a8a1ed7 100644 --- a/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/grpc.py +++ b/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.metastore_v1beta.types import metastore -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO @@ -74,7 +71,7 @@ def __init__( self, *, host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -88,7 +85,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -199,7 +197,7 @@ def __init__( def create_channel( cls, host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -230,13 +228,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -313,7 +313,7 @@ def get_service(self) -> Callable[[metastore.GetServiceRequest], metastore.Servi @property def create_service( self, - ) -> Callable[[metastore.CreateServiceRequest], operations.Operation]: + ) -> Callable[[metastore.CreateServiceRequest], operations_pb2.Operation]: r"""Return a callable for the create service method over gRPC. Creates a metastore service in a project and @@ -333,14 +333,14 @@ def create_service( self._stubs["create_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/CreateService", request_serializer=metastore.CreateServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_service"] @property def update_service( self, - ) -> Callable[[metastore.UpdateServiceRequest], operations.Operation]: + ) -> Callable[[metastore.UpdateServiceRequest], operations_pb2.Operation]: r"""Return a callable for the update service method over gRPC. Updates the parameters of a single service. @@ -359,14 +359,14 @@ def update_service( self._stubs["update_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/UpdateService", request_serializer=metastore.UpdateServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_service"] @property def delete_service( self, - ) -> Callable[[metastore.DeleteServiceRequest], operations.Operation]: + ) -> Callable[[metastore.DeleteServiceRequest], operations_pb2.Operation]: r"""Return a callable for the delete service method over gRPC. Deletes a single service. @@ -385,7 +385,7 @@ def delete_service( self._stubs["delete_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/DeleteService", request_serializer=metastore.DeleteServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_service"] @@ -446,7 +446,7 @@ def get_metadata_import( @property def create_metadata_import( self, - ) -> Callable[[metastore.CreateMetadataImportRequest], operations.Operation]: + ) -> Callable[[metastore.CreateMetadataImportRequest], operations_pb2.Operation]: r"""Return a callable for the create metadata import method over gRPC. 
Creates a new MetadataImport in a given project and @@ -466,14 +466,14 @@ def create_metadata_import( self._stubs["create_metadata_import"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/CreateMetadataImport", request_serializer=metastore.CreateMetadataImportRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_metadata_import"] @property def update_metadata_import( self, - ) -> Callable[[metastore.UpdateMetadataImportRequest], operations.Operation]: + ) -> Callable[[metastore.UpdateMetadataImportRequest], operations_pb2.Operation]: r"""Return a callable for the update metadata import method over gRPC. Updates a single import. @@ -494,14 +494,14 @@ def update_metadata_import( self._stubs["update_metadata_import"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/UpdateMetadataImport", request_serializer=metastore.UpdateMetadataImportRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_metadata_import"] @property def export_metadata( self, - ) -> Callable[[metastore.ExportMetadataRequest], operations.Operation]: + ) -> Callable[[metastore.ExportMetadataRequest], operations_pb2.Operation]: r"""Return a callable for the export metadata method over gRPC. Exports metadata from a service. @@ -520,14 +520,14 @@ def export_metadata( self._stubs["export_metadata"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/ExportMetadata", request_serializer=metastore.ExportMetadataRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["export_metadata"] @property def restore_service( self, - ) -> Callable[[metastore.RestoreServiceRequest], operations.Operation]: + ) -> Callable[[metastore.RestoreServiceRequest], operations_pb2.Operation]: r"""Return a callable for the restore service method over gRPC. Restores a service from a backup. @@ -546,7 +546,7 @@ def restore_service( self._stubs["restore_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/RestoreService", request_serializer=metastore.RestoreServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["restore_service"] @@ -603,7 +603,7 @@ def get_backup(self) -> Callable[[metastore.GetBackupRequest], metastore.Backup] @property def create_backup( self, - ) -> Callable[[metastore.CreateBackupRequest], operations.Operation]: + ) -> Callable[[metastore.CreateBackupRequest], operations_pb2.Operation]: r"""Return a callable for the create backup method over gRPC. Creates a new Backup in a given project and location. 
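
An aside on the pattern above: every mutating RPC on this transport now deserializes into google.longrunning's operations_pb2.Operation, and each stub is created lazily and cached in self._stubs. At the client surface these raw Operations are wrapped in operation futures. A minimal usage sketch, assuming application default credentials are configured; the project, location, service, and backup IDs are hypothetical placeholders:

from google.cloud import metastore_v1beta

client = metastore_v1beta.DataprocMetastoreClient()

# This call flows through the cached "create_backup" stub shown above, whose
# response is deserialized via operations_pb2.Operation.FromString and then
# wrapped in an operation future by the client layer.
operation = client.create_backup(
    request={
        "parent": "projects/my-project/locations/us-central1/services/my-service",
        "backup_id": "nightly",
        "backup": metastore_v1beta.Backup(),
    }
)
backup = operation.result()  # blocks until the server-side backup completes
print(backup.name)
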
@@ -622,14 +622,14 @@ def create_backup( self._stubs["create_backup"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/CreateBackup", request_serializer=metastore.CreateBackupRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_backup"] @property def delete_backup( self, - ) -> Callable[[metastore.DeleteBackupRequest], operations.Operation]: + ) -> Callable[[metastore.DeleteBackupRequest], operations_pb2.Operation]: r"""Return a callable for the delete backup method over gRPC. Deletes a single backup. @@ -648,7 +648,7 @@ def delete_backup( self._stubs["delete_backup"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/DeleteBackup", request_serializer=metastore.DeleteBackupRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_backup"] diff --git a/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/grpc_asyncio.py b/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/grpc_asyncio.py index 1870bb5..b8f6e1c 100644 --- a/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/grpc_asyncio.py +++ b/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.metastore_v1beta.types import metastore -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import DataprocMetastoreTransport, DEFAULT_CLIENT_INFO from .grpc import DataprocMetastoreGrpcTransport @@ -77,7 +74,7 @@ class DataprocMetastoreGrpcAsyncIOTransport(DataprocMetastoreTransport): def create_channel( cls, host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -104,13 +101,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -118,7 +117,7 @@ def __init__( self, *, host: str = "metastore.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -132,7 +131,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -191,7 +191,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -323,7 +322,9 @@ def get_service( @property def create_service( self, - ) -> Callable[[metastore.CreateServiceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.CreateServiceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create service method over gRPC. Creates a metastore service in a project and @@ -343,14 +344,16 @@ def create_service( self._stubs["create_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/CreateService", request_serializer=metastore.CreateServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_service"] @property def update_service( self, - ) -> Callable[[metastore.UpdateServiceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.UpdateServiceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the update service method over gRPC. Updates the parameters of a single service. @@ -369,14 +372,16 @@ def update_service( self._stubs["update_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/UpdateService", request_serializer=metastore.UpdateServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_service"] @property def delete_service( self, - ) -> Callable[[metastore.DeleteServiceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.DeleteServiceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete service method over gRPC. Deletes a single service. 
@@ -395,7 +400,7 @@ def delete_service( self._stubs["delete_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/DeleteService", request_serializer=metastore.DeleteServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_service"] @@ -460,7 +465,7 @@ def get_metadata_import( def create_metadata_import( self, ) -> Callable[ - [metastore.CreateMetadataImportRequest], Awaitable[operations.Operation] + [metastore.CreateMetadataImportRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the create metadata import method over gRPC. @@ -481,7 +486,7 @@ def create_metadata_import( self._stubs["create_metadata_import"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/CreateMetadataImport", request_serializer=metastore.CreateMetadataImportRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_metadata_import"] @@ -489,7 +494,7 @@ def create_metadata_import( def update_metadata_import( self, ) -> Callable[ - [metastore.UpdateMetadataImportRequest], Awaitable[operations.Operation] + [metastore.UpdateMetadataImportRequest], Awaitable[operations_pb2.Operation] ]: r"""Return a callable for the update metadata import method over gRPC. @@ -511,14 +516,16 @@ def update_metadata_import( self._stubs["update_metadata_import"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/UpdateMetadataImport", request_serializer=metastore.UpdateMetadataImportRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_metadata_import"] @property def export_metadata( self, - ) -> Callable[[metastore.ExportMetadataRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.ExportMetadataRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the export metadata method over gRPC. Exports metadata from a service. @@ -537,14 +544,16 @@ def export_metadata( self._stubs["export_metadata"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/ExportMetadata", request_serializer=metastore.ExportMetadataRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["export_metadata"] @property def restore_service( self, - ) -> Callable[[metastore.RestoreServiceRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [metastore.RestoreServiceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the restore service method over gRPC. Restores a service from a backup. 
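
On the asyncio side, each of these operation-returning stubs is typed Awaitable[operations_pb2.Operation], and the async client wraps them in AsyncOperation futures. A minimal usage sketch, assuming default credentials; the resource names are hypothetical placeholders:

import asyncio
from google.cloud import metastore_v1beta

async def restore_from_backup() -> None:
    client = metastore_v1beta.DataprocMetastoreAsyncClient()
    operation = await client.restore_service(
        request={
            "service": "projects/my-project/locations/us-central1/services/my-service",
            "backup": "projects/my-project/locations/us-central1/services/my-service/backups/nightly",
        }
    )
    # AsyncOperation.result() is itself a coroutine; awaiting it polls until
    # the restore finishes and returns the Restore message.
    restored = await operation.result()
    print(restored.state)

asyncio.run(restore_from_backup())
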
@@ -563,7 +572,7 @@ def restore_service( self._stubs["restore_service"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/RestoreService", request_serializer=metastore.RestoreServiceRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["restore_service"] @@ -624,7 +633,7 @@ def get_backup( @property def create_backup( self, - ) -> Callable[[metastore.CreateBackupRequest], Awaitable[operations.Operation]]: + ) -> Callable[[metastore.CreateBackupRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create backup method over gRPC. Creates a new Backup in a given project and location. @@ -643,14 +652,14 @@ def create_backup( self._stubs["create_backup"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/CreateBackup", request_serializer=metastore.CreateBackupRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_backup"] @property def delete_backup( self, - ) -> Callable[[metastore.DeleteBackupRequest], Awaitable[operations.Operation]]: + ) -> Callable[[metastore.DeleteBackupRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete backup method over gRPC. Deletes a single backup. @@ -669,7 +678,7 @@ def delete_backup( self._stubs["delete_backup"] = self.grpc_channel.unary_unary( "/google.cloud.metastore.v1beta.DataprocMetastore/DeleteBackup", request_serializer=metastore.DeleteBackupRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_backup"] diff --git a/google/cloud/metastore_v1beta/types/__init__.py b/google/cloud/metastore_v1beta/types/__init__.py index 7914dc7..fa17cfe 100644 --- a/google/cloud/metastore_v1beta/types/__init__.py +++ b/google/cloud/metastore_v1beta/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .metastore import ( Backup, CreateBackupRequest, diff --git a/google/cloud/metastore_v1beta/types/metastore.py b/google/cloud/metastore_v1beta/types/metastore.py index 187e8c1..7dddf16 100644 --- a/google/cloud/metastore_v1beta/types/metastore.py +++ b/google/cloud/metastore_v1beta/types/metastore.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
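
The types diff that follows is mostly mechanical: the protobuf well-known types are imported under their canonical _pb2 module names (field_mask_pb2, timestamp_pb2, wrappers_pb2, dayofweek_pb2) and the generated proto.Field assignments are compacted onto single lines with trailing commas. Wire behavior is unchanged, and proto-plus messages are constructed exactly as before. A small sketch, with a hypothetical resource name:

from google.cloud import metastore_v1beta
from google.protobuf import wrappers_pb2
from google.type import dayofweek_pb2

# Wrapper and enum fields take the same _pb2 values as before the rename.
window = metastore_v1beta.MaintenanceWindow(
    hour_of_day=wrappers_pb2.Int32Value(value=3),
    day_of_week=dayofweek_pb2.DayOfWeek.SUNDAY,
)
service = metastore_v1beta.Service(
    name="projects/my-project/locations/us-central1/services/my-service",
    maintenance_window=window,
)
print(service.maintenance_window.hour_of_day.value)  # 3
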
# - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore -from google.type import dayofweek_pb2 as dayofweek # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore __protobuf__ = proto.module( @@ -66,7 +63,6 @@ class Service(proto.Message): r"""A managed metastore service that serves metadata queries. - Attributes: hive_metastore_config (google.cloud.metastore_v1beta.types.HiveMetastoreConfig): Configuration information specific to running @@ -164,43 +160,27 @@ class ReleaseChannel(proto.Enum): oneof="metastore_config", message="HiveMetastoreConfig", ) - - name = proto.Field(proto.STRING, number=1) - - create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=4) - - network = proto.Field(proto.STRING, number=7) - - endpoint_uri = proto.Field(proto.STRING, number=8) - - port = proto.Field(proto.INT32, number=9) - + name = proto.Field(proto.STRING, number=1,) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) + network = proto.Field(proto.STRING, number=7,) + endpoint_uri = proto.Field(proto.STRING, number=8,) + port = proto.Field(proto.INT32, number=9,) state = proto.Field(proto.ENUM, number=10, enum=State,) - - state_message = proto.Field(proto.STRING, number=11) - - artifact_gcs_uri = proto.Field(proto.STRING, number=12) - + state_message = proto.Field(proto.STRING, number=11,) + artifact_gcs_uri = proto.Field(proto.STRING, number=12,) tier = proto.Field(proto.ENUM, number=13, enum=Tier,) - metadata_integration = proto.Field( proto.MESSAGE, number=14, message="MetadataIntegration", ) - maintenance_window = proto.Field( proto.MESSAGE, number=15, message="MaintenanceWindow", ) - - uid = proto.Field(proto.STRING, number=16) - + uid = proto.Field(proto.STRING, number=16,) metadata_management_activity = proto.Field( proto.MESSAGE, number=17, message="MetadataManagementActivity", ) - release_channel = proto.Field(proto.ENUM, number=19, enum=ReleaseChannel,) @@ -231,7 +211,7 @@ class DataCatalogConfig(proto.Message): Catalog. """ - enabled = proto.Field(proto.BOOL, number=2) + enabled = proto.Field(proto.BOOL, number=2,) class MaintenanceWindow(proto.Message): @@ -246,9 +226,8 @@ class MaintenanceWindow(proto.Message): The day of week, when the window starts. """ - hour_of_day = proto.Field(proto.MESSAGE, number=1, message=wrappers.Int32Value,) - - day_of_week = proto.Field(proto.ENUM, number=2, enum=dayofweek.DayOfWeek,) + hour_of_day = proto.Field(proto.MESSAGE, number=1, message=wrappers_pb2.Int32Value,) + day_of_week = proto.Field(proto.ENUM, number=2, enum=dayofweek_pb2.DayOfWeek,) class HiveMetastoreConfig(proto.Message): @@ -272,16 +251,13 @@ class HiveMetastoreConfig(proto.Message): the request's ``service``. 
""" - version = proto.Field(proto.STRING, number=1) - - config_overrides = proto.MapField(proto.STRING, proto.STRING, number=2) - + version = proto.Field(proto.STRING, number=1,) + config_overrides = proto.MapField(proto.STRING, proto.STRING, number=2,) kerberos_config = proto.Field(proto.MESSAGE, number=3, message="KerberosConfig",) class KerberosConfig(proto.Message): r"""Configuration information for a Kerberos principal. - Attributes: keytab (google.cloud.metastore_v1beta.types.Secret): A Kerberos keytab file that can be used to @@ -299,15 +275,12 @@ class KerberosConfig(proto.Message): """ keytab = proto.Field(proto.MESSAGE, number=1, message="Secret",) - - principal = proto.Field(proto.STRING, number=2) - - krb5_config_gcs_uri = proto.Field(proto.STRING, number=3) + principal = proto.Field(proto.STRING, number=2,) + krb5_config_gcs_uri = proto.Field(proto.STRING, number=3,) class Secret(proto.Message): r"""A securely stored value. - Attributes: cloud_secret (str): The relative resource name of a Secret Manager secret @@ -316,12 +289,11 @@ class Secret(proto.Message): ``projects/{project_number}/secrets/{secret_id}/versions/{version_id}``. """ - cloud_secret = proto.Field(proto.STRING, number=2, oneof="value") + cloud_secret = proto.Field(proto.STRING, number=2, oneof="value",) class MetadataManagementActivity(proto.Message): r"""The metadata management activities of the metastore service. - Attributes: metadata_exports (Sequence[google.cloud.metastore_v1beta.types.MetadataExport]): Output only. The latest metadata exports of @@ -334,13 +306,11 @@ class MetadataManagementActivity(proto.Message): metadata_exports = proto.RepeatedField( proto.MESSAGE, number=1, message="MetadataExport", ) - restores = proto.RepeatedField(proto.MESSAGE, number=2, message="Restore",) class MetadataImport(proto.Message): r"""A metastore resource that imports metadata. - Attributes: database_dump (google.cloud.metastore_v1beta.types.MetadataImport.DatabaseDump): Immutable. A database dump from a pre- @@ -397,31 +367,22 @@ class DatabaseType(proto.Enum): database_type = proto.Field( proto.ENUM, number=1, enum="MetadataImport.DatabaseDump.DatabaseType", ) - - gcs_uri = proto.Field(proto.STRING, number=2) - - source_database = proto.Field(proto.STRING, number=3) - + gcs_uri = proto.Field(proto.STRING, number=2,) + source_database = proto.Field(proto.STRING, number=3,) type_ = proto.Field(proto.ENUM, number=4, enum="DatabaseDumpSpec.Type",) database_dump = proto.Field( proto.MESSAGE, number=6, oneof="metadata", message=DatabaseDump, ) - - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=5, enum=State,) class MetadataExport(proto.Message): r"""The details of a metadata export operation. - Attributes: destination_gcs_uri (str): Output only. 
A Cloud Storage URI of a folder that metadata @@ -447,14 +408,10 @@ class State(proto.Enum): FAILED = 3 CANCELLED = 4 - destination_gcs_uri = proto.Field(proto.STRING, number=4, oneof="destination") - - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + destination_gcs_uri = proto.Field(proto.STRING, number=4, oneof="destination",) + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=3, enum=State,) - database_dump_type = proto.Field( proto.ENUM, number=5, enum="DatabaseDumpSpec.Type", ) @@ -462,7 +419,6 @@ class State(proto.Enum): class Backup(proto.Message): r"""The details of a backup resource. - Attributes: name (str): Immutable. The relative resource name of the backup, in the @@ -492,22 +448,16 @@ class State(proto.Enum): ACTIVE = 3 FAILED = 4 - name = proto.Field(proto.STRING, number=1) - - create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - + name = proto.Field(proto.STRING, number=1,) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=4, enum=State,) - service_revision = proto.Field(proto.MESSAGE, number=5, message="Service",) - - description = proto.Field(proto.STRING, number=6) + description = proto.Field(proto.STRING, number=6,) class Restore(proto.Message): r"""The details of a metadata restore operation. - Attributes: start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the restore @@ -544,17 +494,12 @@ class RestoreType(proto.Enum): FULL = 1 METADATA_ONLY = 2 - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=3, enum=State,) - - backup = proto.Field(proto.STRING, number=4) - + backup = proto.Field(proto.STRING, number=4,) type_ = proto.Field(proto.ENUM, number=5, enum=RestoreType,) - - details = proto.Field(proto.STRING, number=6) + details = proto.Field(proto.STRING, number=6,) class ListServicesRequest(proto.Message): @@ -594,15 +539,11 @@ class ListServicesRequest(proto.Message): order. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListServicesResponse(proto.Message): @@ -625,10 +566,8 @@ def raw_page(self): return self services = proto.RepeatedField(proto.MESSAGE, number=1, message="Service",) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetServiceRequest(proto.Message): @@ -643,7 +582,7 @@ class GetServiceRequest(proto.Message): ``projects/{project_number}/locations/{location_id}/services/{service_id}``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateServiceRequest(proto.Message): @@ -686,13 +625,10 @@ class CreateServiceRequest(proto.Message): supported. """ - parent = proto.Field(proto.STRING, number=1) - - service_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + service_id = proto.Field(proto.STRING, number=2,) service = proto.Field(proto.MESSAGE, number=3, message="Service",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class UpdateServiceRequest(proto.Message): @@ -731,11 +667,11 @@ class UpdateServiceRequest(proto.Message): supported. """ - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) service = proto.Field(proto.MESSAGE, number=2, message="Service",) - - request_id = proto.Field(proto.STRING, number=3) + request_id = proto.Field(proto.STRING, number=3,) class DeleteServiceRequest(proto.Message): @@ -766,9 +702,8 @@ class DeleteServiceRequest(proto.Message): supported. """ - name = proto.Field(proto.STRING, number=1) - - request_id = proto.Field(proto.STRING, number=2) + name = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=2,) class ListMetadataImportsRequest(proto.Message): @@ -808,15 +743,11 @@ class ListMetadataImportsRequest(proto.Message): order. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListMetadataImportsResponse(proto.Message): @@ -841,10 +772,8 @@ def raw_page(self): metadata_imports = proto.RepeatedField( proto.MESSAGE, number=1, message="MetadataImport", ) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetMetadataImportRequest(proto.Message): @@ -859,7 +788,7 @@ class GetMetadataImportRequest(proto.Message): ``projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports/{import_id}``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateMetadataImportRequest(proto.Message): @@ -902,13 +831,10 @@ class CreateMetadataImportRequest(proto.Message): supported. """ - parent = proto.Field(proto.STRING, number=1) - - metadata_import_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + metadata_import_id = proto.Field(proto.STRING, number=2,) metadata_import = proto.Field(proto.MESSAGE, number=3, message="MetadataImport",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class UpdateMetadataImportRequest(proto.Message): @@ -947,11 +873,11 @@ class UpdateMetadataImportRequest(proto.Message): supported. """ - update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) - + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) metadata_import = proto.Field(proto.MESSAGE, number=2, message="MetadataImport",) - - request_id = proto.Field(proto.STRING, number=3) + request_id = proto.Field(proto.STRING, number=3,) class ListBackupsRequest(proto.Message): @@ -991,15 +917,11 @@ class ListBackupsRequest(proto.Message): order. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) class ListBackupsResponse(proto.Message): @@ -1022,10 +944,8 @@ def raw_page(self): return self backups = proto.RepeatedField(proto.MESSAGE, number=1, message="Backup",) - - next_page_token = proto.Field(proto.STRING, number=2) - - unreachable = proto.RepeatedField(proto.STRING, number=3) + next_page_token = proto.Field(proto.STRING, number=2,) + unreachable = proto.RepeatedField(proto.STRING, number=3,) class GetBackupRequest(proto.Message): @@ -1040,7 +960,7 @@ class GetBackupRequest(proto.Message): ``projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}``. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateBackupRequest(proto.Message): @@ -1083,13 +1003,10 @@ class CreateBackupRequest(proto.Message): supported. """ - parent = proto.Field(proto.STRING, number=1) - - backup_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + backup_id = proto.Field(proto.STRING, number=2,) backup = proto.Field(proto.MESSAGE, number=3, message="Backup",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class DeleteBackupRequest(proto.Message): @@ -1120,9 +1037,8 @@ class DeleteBackupRequest(proto.Message): supported. """ - name = proto.Field(proto.STRING, number=1) - - request_id = proto.Field(proto.STRING, number=2) + name = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=2,) class ExportMetadataRequest(proto.Message): @@ -1161,12 +1077,9 @@ class ExportMetadataRequest(proto.Message): defaults to ``MYSQL``. """ - destination_gcs_folder = proto.Field(proto.STRING, number=2, oneof="destination") - - service = proto.Field(proto.STRING, number=1) - - request_id = proto.Field(proto.STRING, number=3) - + destination_gcs_folder = proto.Field(proto.STRING, number=2, oneof="destination",) + service = proto.Field(proto.STRING, number=1,) + request_id = proto.Field(proto.STRING, number=3,) database_dump_type = proto.Field( proto.ENUM, number=4, enum="DatabaseDumpSpec.Type", ) @@ -1174,7 +1087,6 @@ class ExportMetadataRequest(proto.Message): class RestoreServiceRequest(proto.Message): r"""Request message for [DataprocMetastore.Restore][]. - Attributes: service (str): Required. The relative resource name of the metastore @@ -1207,18 +1119,14 @@ class RestoreServiceRequest(proto.Message): supported. """ - service = proto.Field(proto.STRING, number=1) - - backup = proto.Field(proto.STRING, number=2) - + service = proto.Field(proto.STRING, number=1,) + backup = proto.Field(proto.STRING, number=2,) restore_type = proto.Field(proto.ENUM, number=3, enum="Restore.RestoreType",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. - Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the operation was @@ -1246,24 +1154,17 @@ class OperationMetadata(proto.Message): operation. """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - target = proto.Field(proto.STRING, number=3) - - verb = proto.Field(proto.STRING, number=4) - - status_message = proto.Field(proto.STRING, number=5) - - requested_cancellation = proto.Field(proto.BOOL, number=6) - - api_version = proto.Field(proto.STRING, number=7) + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + target = proto.Field(proto.STRING, number=3,) + verb = proto.Field(proto.STRING, number=4,) + status_message = proto.Field(proto.STRING, number=5,) + requested_cancellation = proto.Field(proto.BOOL, number=6,) + api_version = proto.Field(proto.STRING, number=7,) class LocationMetadata(proto.Message): r"""Metadata about the service in a location. 
- Attributes: supported_hive_metastore_versions (Sequence[google.cloud.metastore_v1beta.types.LocationMetadata.HiveMetastoreVersion]): The versions of Hive Metastore that can be used when @@ -1286,9 +1187,8 @@ class HiveMetastoreVersion(proto.Message): that omits the ``version``. """ - version = proto.Field(proto.STRING, number=1) - - is_default = proto.Field(proto.BOOL, number=2) + version = proto.Field(proto.STRING, number=1,) + is_default = proto.Field(proto.BOOL, number=2,) supported_hive_metastore_versions = proto.RepeatedField( proto.MESSAGE, number=1, message=HiveMetastoreVersion, @@ -1298,7 +1198,7 @@ class HiveMetastoreVersion(proto.Message): class DatabaseDumpSpec(proto.Message): r"""The specification of database dump to import from or export to. - """ + """ class Type(proto.Enum): r"""The type of the database dump.""" diff --git a/owlbot.py b/owlbot.py index 21a7d30..e201197 100644 --- a/owlbot.py +++ b/owlbot.py @@ -22,7 +22,7 @@ common = gcp.CommonTemplates() -default_version = "v1beta" +default_version = "v1" for library in s.get_staging_dirs(default_version): s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"]) diff --git a/scripts/fixup_metastore_v1_keywords.py b/scripts/fixup_metastore_v1_keywords.py new file mode 100644 index 0000000..e09d167 --- /dev/null +++ b/scripts/fixup_metastore_v1_keywords.py @@ -0,0 +1,185 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
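
A note before the script body below: this new fixup_metastore_v1_keywords.py is the standard GAPIC "keyword fixup" helper, generated per API version. It walks a source tree with libcst and rewrites old flattened, positional client calls into the single-request style, folding API parameters into one dict while leaving the control parameters (retry, timeout, metadata) as keywords. Roughly, it turns the commented-out call below into the form that follows it; all names and values are hypothetical:

from google.cloud import metastore_v1

client = metastore_v1.DataprocMetastoreClient()

# Old flattened style the transformer matches; the positional args map onto
# METHOD_TO_PARAMS['create_service'] = ('parent', 'service_id', 'service', 'request_id'):
# client.create_service("projects/p/locations/l", "svc-1", service, timeout=30.0)

# Rewritten style it emits:
client.create_service(
    request={
        "parent": "projects/p/locations/l",
        "service_id": "svc-1",
        "service": metastore_v1.Service(),
    },
    timeout=30.0,
)
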
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class metastoreCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_metadata_import': ('parent', 'metadata_import_id', 'metadata_import', 'request_id', ), + 'create_service': ('parent', 'service_id', 'service', 'request_id', ), + 'delete_service': ('name', 'request_id', ), + 'export_metadata': ('service', 'destination_gcs_folder', 'request_id', 'database_dump_type', ), + 'get_metadata_import': ('name', ), + 'get_service': ('name', ), + 'list_metadata_imports': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_services': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_metadata_import': ('update_mask', 'metadata_import', 'request_id', ), + 'update_service': ('update_mask', 'service', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=metastoreCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the metastore client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/scripts/fixup_metastore_v1alpha_keywords.py b/scripts/fixup_metastore_v1alpha_keywords.py index 5b018ba..2cdabbc 100644 --- a/scripts/fixup_metastore_v1alpha_keywords.py +++ b/scripts/fixup_metastore_v1alpha_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os import libcst as cst @@ -41,22 +39,21 @@ def partition( class metastoreCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', ), - 'create_metadata_import': ('parent', 'metadata_import_id', 'metadata_import', 'request_id', ), - 'create_service': ('parent', 'service_id', 'service', 'request_id', ), - 'delete_backup': ('name', 'request_id', ), - 'delete_service': ('name', 'request_id', ), - 'export_metadata': ('service', 'destination_gcs_folder', 'request_id', 'database_dump_type', ), - 'get_backup': ('name', ), - 'get_metadata_import': ('name', ), - 'get_service': ('name', ), - 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_metadata_imports': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_services': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'restore_service': ('service', 'backup', 'restore_type', 'request_id', ), - 'update_metadata_import': ('update_mask', 'metadata_import', 'request_id', ), - 'update_service': ('update_mask', 'service', 'request_id', ), - + 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', ), + 'create_metadata_import': ('parent', 'metadata_import_id', 'metadata_import', 'request_id', ), + 'create_service': ('parent', 'service_id', 'service', 'request_id', ), + 'delete_backup': ('name', 'request_id', ), + 'delete_service': ('name', 'request_id', ), + 'export_metadata': ('service', 'destination_gcs_folder', 'request_id', 'database_dump_type', ), + 'get_backup': ('name', ), + 'get_metadata_import': ('name', ), + 'get_service': ('name', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_metadata_imports': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_services': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'restore_service': ('service', 'backup', 'restore_type', 'request_id', ), + 'update_metadata_import': ('update_mask', 'metadata_import', 'request_id', ), + 'update_service': ('update_mask', 'service', 'request_id', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -87,7 +84,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/scripts/fixup_metastore_v1beta_keywords.py b/scripts/fixup_metastore_v1beta_keywords.py index 5b018ba..2cdabbc 100644 --- a/scripts/fixup_metastore_v1beta_keywords.py +++ b/scripts/fixup_metastore_v1beta_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os import libcst as cst @@ -41,22 +39,21 @@ def partition( class metastoreCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', ), - 'create_metadata_import': ('parent', 'metadata_import_id', 'metadata_import', 'request_id', ), - 'create_service': ('parent', 'service_id', 'service', 'request_id', ), - 'delete_backup': ('name', 'request_id', ), - 'delete_service': ('name', 'request_id', ), - 'export_metadata': ('service', 'destination_gcs_folder', 'request_id', 'database_dump_type', ), - 'get_backup': ('name', ), - 'get_metadata_import': ('name', ), - 'get_service': ('name', ), - 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_metadata_imports': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_services': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'restore_service': ('service', 'backup', 'restore_type', 'request_id', ), - 'update_metadata_import': ('update_mask', 'metadata_import', 'request_id', ), - 'update_service': ('update_mask', 'service', 'request_id', ), - + 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', ), + 'create_metadata_import': ('parent', 'metadata_import_id', 'metadata_import', 'request_id', ), + 'create_service': ('parent', 'service_id', 'service', 'request_id', ), + 'delete_backup': ('name', 'request_id', ), + 'delete_service': ('name', 'request_id', ), + 'export_metadata': ('service', 'destination_gcs_folder', 'request_id', 'database_dump_type', ), + 'get_backup': ('name', ), + 'get_metadata_import': ('name', ), + 'get_service': ('name', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_metadata_imports': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_services': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'restore_service': ('service', 'backup', 'restore_type', 'request_id', ), + 'update_metadata_import': ('update_mask', 'metadata_import', 'request_id', ), + 'update_service': ('update_mask', 'service', 'request_id', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -87,7 +84,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/metastore_v1/__init__.py b/tests/unit/gapic/metastore_v1/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/gapic/metastore_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py b/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py new file mode 100644 index 0000000..e51c2a1 --- /dev/null +++ b/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -0,0 +1,3780 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
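
The new v1 test module that begins below follows the same recipe as the existing v1alpha/v1beta suites: anonymous credentials, mocked gRPC stubs, and pytest markers that gate expectations on the installed google-auth / google-api-core versions. A condensed sketch of the recurring stub-mocking pattern (not a verbatim excerpt from the file):

import mock
from google.auth import credentials as ga_credentials
from google.cloud.metastore_v1.services.dataproc_metastore import DataprocMetastoreClient
from google.cloud.metastore_v1.types import metastore

def test_get_service_sketch():
    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials())
    # Patch the cached stub's __call__ so the RPC never hits the network.
    with mock.patch.object(type(client.transport.get_service), "__call__") as call:
        call.return_value = metastore.Service(name="name_value")
        response = client.get_service(request={"name": "name_value"})
        call.assert_called_once()
        assert response.name == "name_value"
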
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.metastore_v1.services.dataproc_metastore import ( + DataprocMetastoreAsyncClient, +) +from google.cloud.metastore_v1.services.dataproc_metastore import ( + DataprocMetastoreClient, +) +from google.cloud.metastore_v1.services.dataproc_metastore import pagers +from google.cloud.metastore_v1.services.dataproc_metastore import transports +from google.cloud.metastore_v1.services.dataproc_metastore.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.metastore_v1.services.dataproc_metastore.transports.base import ( + _GOOGLE_AUTH_VERSION, +) +from google.cloud.metastore_v1.types import metastore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataprocMetastoreClient._get_default_mtls_endpoint(None) is None + assert ( + DataprocMetastoreClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DataprocMetastoreClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DataprocMetastoreClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataprocMetastoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataprocMetastoreClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [DataprocMetastoreClient, DataprocMetastoreAsyncClient,] +) +def test_dataproc_metastore_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "metastore.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [DataprocMetastoreClient, DataprocMetastoreAsyncClient,] +) +def test_dataproc_metastore_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "metastore.googleapis.com:443" + + +def test_dataproc_metastore_client_get_transport_class(): + transport = DataprocMetastoreClient.get_transport_class() + available_transports = [ + transports.DataprocMetastoreGrpcTransport, + ] + assert transport in available_transports + + transport = DataprocMetastoreClient.get_transport_class("grpc") + assert transport == transports.DataprocMetastoreGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataprocMetastoreClient, transports.DataprocMetastoreGrpcTransport, "grpc"), + ( + DataprocMetastoreAsyncClient, + transports.DataprocMetastoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + DataprocMetastoreClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataprocMetastoreClient), +) +@mock.patch.object( + DataprocMetastoreAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataprocMetastoreAsyncClient), +) +def test_dataproc_metastore_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(DataprocMetastoreClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataprocMetastoreClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
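+    # Only the strings "true" and "false" are valid for this variable, so any other
+    # value should cause the constructor to raise before any transport is created.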
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DataprocMetastoreClient, + transports.DataprocMetastoreGrpcTransport, + "grpc", + "true", + ), + ( + DataprocMetastoreAsyncClient, + transports.DataprocMetastoreGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DataprocMetastoreClient, + transports.DataprocMetastoreGrpcTransport, + "grpc", + "false", + ), + ( + DataprocMetastoreAsyncClient, + transports.DataprocMetastoreGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + DataprocMetastoreClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataprocMetastoreClient), +) +@mock.patch.object( + DataprocMetastoreAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataprocMetastoreAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dataproc_metastore_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
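+    # No explicit client_cert_source is supplied in this case, so the client should
+    # fall back to the application-default certificate; both discovery helpers from
+    # google.auth.transport.mtls are mocked out below.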
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataprocMetastoreClient, transports.DataprocMetastoreGrpcTransport, "grpc"), + ( + DataprocMetastoreAsyncClient, + transports.DataprocMetastoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_dataproc_metastore_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataprocMetastoreClient, transports.DataprocMetastoreGrpcTransport, "grpc"), + ( + DataprocMetastoreAsyncClient, + transports.DataprocMetastoreGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_dataproc_metastore_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
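+    # The path is forwarded verbatim to the transport (whose __init__ is mocked
+    # below), so no file named credentials.json needs to exist for this test.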
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_dataproc_metastore_client_client_options_from_dict(): + with mock.patch( + "google.cloud.metastore_v1.services.dataproc_metastore.transports.DataprocMetastoreGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DataprocMetastoreClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_services( + transport: str = "grpc", request_type=metastore.ListServicesRequest +): + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_services), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListServicesResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + response = client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListServicesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServicesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_services_from_dict(): + test_list_services(request_type=dict) + + +def test_list_services_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_services), "__call__") as call: + client.list_services() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListServicesRequest() + + +@pytest.mark.asyncio +async def test_list_services_async( + transport: str = "grpc_asyncio", request_type=metastore.ListServicesRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_services), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListServicesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListServicesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListServicesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_services_async_from_dict(): + await test_list_services_async(request_type=dict) + + +def test_list_services_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListServicesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_services), "__call__") as call: + call.return_value = metastore.ListServicesResponse() + client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_services_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListServicesRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_services), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListServicesResponse() + ) + await client.list_services(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_services_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_services), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.ListServicesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_services(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+def test_list_services_flattened_error():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_services(
+            metastore.ListServicesRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_services_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_services), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metastore.ListServicesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_services(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_services_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_services(
+            metastore.ListServicesRequest(), parent="parent_value",
+        )
+
+
+def test_list_services_pager():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_services), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListServicesResponse(
+                services=[
+                    metastore.Service(),
+                    metastore.Service(),
+                    metastore.Service(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListServicesResponse(services=[], next_page_token="def",),
+            metastore.ListServicesResponse(
+                services=[metastore.Service(),], next_page_token="ghi",
+            ),
+            metastore.ListServicesResponse(
+                services=[metastore.Service(), metastore.Service(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_services(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, metastore.Service) for i in results)
+
+
+def test_list_services_pages():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_services), "__call__") as call:
+        # Set the response to a series of pages.
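+        # Each next_page_token below feeds the follow-up request; the empty token on
+        # the final page ends iteration, and the trailing RuntimeError would surface
+        # if the pager ever issued one request too many.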
+        call.side_effect = (
+            metastore.ListServicesResponse(
+                services=[
+                    metastore.Service(),
+                    metastore.Service(),
+                    metastore.Service(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListServicesResponse(services=[], next_page_token="def",),
+            metastore.ListServicesResponse(
+                services=[metastore.Service(),], next_page_token="ghi",
+            ),
+            metastore.ListServicesResponse(
+                services=[metastore.Service(), metastore.Service(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_services(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_services_async_pager():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_services), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListServicesResponse(
+                services=[
+                    metastore.Service(),
+                    metastore.Service(),
+                    metastore.Service(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListServicesResponse(services=[], next_page_token="def",),
+            metastore.ListServicesResponse(
+                services=[metastore.Service(),], next_page_token="ghi",
+            ),
+            metastore.ListServicesResponse(
+                services=[metastore.Service(), metastore.Service(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_services(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, metastore.Service) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_services_async_pages():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_services), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListServicesResponse(
+                services=[
+                    metastore.Service(),
+                    metastore.Service(),
+                    metastore.Service(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListServicesResponse(services=[], next_page_token="def",),
+            metastore.ListServicesResponse(
+                services=[metastore.Service(),], next_page_token="ghi",
+            ),
+            metastore.ListServicesResponse(
+                services=[metastore.Service(), metastore.Service(),],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_services(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_service(transport: str = "grpc", request_type=metastore.GetServiceRequest):
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_service), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = metastore.Service( + name="name_value", + network="network_value", + endpoint_uri="endpoint_uri_value", + port=453, + state=metastore.Service.State.CREATING, + state_message="state_message_value", + artifact_gcs_uri="artifact_gcs_uri_value", + tier=metastore.Service.Tier.DEVELOPER, + uid="uid_value", + release_channel=metastore.Service.ReleaseChannel.CANARY, + hive_metastore_config=metastore.HiveMetastoreConfig( + version="version_value" + ), + ) + response = client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.Service) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.endpoint_uri == "endpoint_uri_value" + assert response.port == 453 + assert response.state == metastore.Service.State.CREATING + assert response.state_message == "state_message_value" + assert response.artifact_gcs_uri == "artifact_gcs_uri_value" + assert response.tier == metastore.Service.Tier.DEVELOPER + assert response.uid == "uid_value" + assert response.release_channel == metastore.Service.ReleaseChannel.CANARY + + +def test_get_service_from_dict(): + test_get_service(request_type=dict) + + +def test_get_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_service), "__call__") as call: + client.get_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetServiceRequest() + + +@pytest.mark.asyncio +async def test_get_service_async( + transport: str = "grpc_asyncio", request_type=metastore.GetServiceRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.Service( + name="name_value", + network="network_value", + endpoint_uri="endpoint_uri_value", + port=453, + state=metastore.Service.State.CREATING, + state_message="state_message_value", + artifact_gcs_uri="artifact_gcs_uri_value", + tier=metastore.Service.Tier.DEVELOPER, + uid="uid_value", + release_channel=metastore.Service.ReleaseChannel.CANARY, + ) + ) + response = await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetServiceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metastore.Service) + assert response.name == "name_value" + assert response.network == "network_value" + assert response.endpoint_uri == "endpoint_uri_value" + assert response.port == 453 + assert response.state == metastore.Service.State.CREATING + assert response.state_message == "state_message_value" + assert response.artifact_gcs_uri == "artifact_gcs_uri_value" + assert response.tier == metastore.Service.Tier.DEVELOPER + assert response.uid == "uid_value" + assert response.release_channel == metastore.Service.ReleaseChannel.CANARY + + +@pytest.mark.asyncio +async def test_get_service_async_from_dict(): + await test_get_service_async(request_type=dict) + + +def test_get_service_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetServiceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_service), "__call__") as call: + call.return_value = metastore.Service() + client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_service_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetServiceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_service), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Service()) + await client.get_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_service_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.Service() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_service(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_get_service_flattened_error(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.get_service(
+            metastore.GetServiceRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_service_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_service), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Service())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_service(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_service_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_service(
+            metastore.GetServiceRequest(), name="name_value",
+        )
+
+
+def test_create_service(
+    transport: str = "grpc", request_type=metastore.CreateServiceRequest
+):
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_service), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_service(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.CreateServiceRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_service_from_dict():
+    test_create_service(request_type=dict)
+
+
+def test_create_service_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_service), "__call__") as call:
+        client.create_service()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.CreateServiceRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_service_async(
+    transport: str = "grpc_asyncio", request_type=metastore.CreateServiceRequest
+):
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_service_async_from_dict(): + await test_create_service_async(request_type=dict) + + +def test_create_service_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateServiceRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_service), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_service_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateServiceRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_service), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_service_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
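+        # The generated client is expected to copy each keyword argument into the
+        # corresponding CreateServiceRequest field before invoking the transport,
+        # which the assertions on args[0] below verify.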
+        client.create_service(
+            parent="parent_value",
+            service=metastore.Service(
+                hive_metastore_config=metastore.HiveMetastoreConfig(
+                    version="version_value"
+                )
+            ),
+            service_id="service_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+        assert args[0].service == metastore.Service(
+            hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value")
+        )
+        assert args[0].service_id == "service_id_value"
+
+
+def test_create_service_flattened_error():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_service(
+            metastore.CreateServiceRequest(),
+            parent="parent_value",
+            service=metastore.Service(
+                hive_metastore_config=metastore.HiveMetastoreConfig(
+                    version="version_value"
+                )
+            ),
+            service_id="service_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_service_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_service), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_service(
+            parent="parent_value",
+            service=metastore.Service(
+                hive_metastore_config=metastore.HiveMetastoreConfig(
+                    version="version_value"
+                )
+            ),
+            service_id="service_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+        assert args[0].service == metastore.Service(
+            hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value")
+        )
+        assert args[0].service_id == "service_id_value"
+
+
+@pytest.mark.asyncio
+async def test_create_service_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_service(
+            metastore.CreateServiceRequest(),
+            parent="parent_value",
+            service=metastore.Service(
+                hive_metastore_config=metastore.HiveMetastoreConfig(
+                    version="version_value"
+                )
+            ),
+            service_id="service_id_value",
+        )
+
+
+def test_update_service(
+    transport: str = "grpc", request_type=metastore.UpdateServiceRequest
+):
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_service_from_dict(): + test_update_service(request_type=dict) + + +def test_update_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_service), "__call__") as call: + client.update_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateServiceRequest() + + +@pytest.mark.asyncio +async def test_update_service_async( + transport: str = "grpc_asyncio", request_type=metastore.UpdateServiceRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_service_async_from_dict(): + await test_update_service_async(request_type=dict) + + +def test_update_service_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateServiceRequest() + + request.service.name = "service.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_service), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
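+    # For update RPCs the routing parameter comes from the nested resource name,
+    # which is why the header below is keyed on "service.name" rather than "name".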
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "service.name=service.name/value",) in kw[
+        "metadata"
+    ]
+
+
+@pytest.mark.asyncio
+async def test_update_service_field_headers_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metastore.UpdateServiceRequest()
+
+    request.service.name = "service.name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_service), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.update_service(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "service.name=service.name/value",) in kw[
+        "metadata"
+    ]
+
+
+def test_update_service_flattened():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_service), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_service(
+            service=metastore.Service(
+                hive_metastore_config=metastore.HiveMetastoreConfig(
+                    version="version_value"
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].service == metastore.Service(
+            hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value")
+        )
+        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
+
+
+def test_update_service_flattened_error():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_service(
+            metastore.UpdateServiceRequest(),
+            service=metastore.Service(
+                hive_metastore_config=metastore.HiveMetastoreConfig(
+                    version="version_value"
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_service_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_service), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.update_service( + service=metastore.Service( + hive_metastore_config=metastore.HiveMetastoreConfig( + version="version_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].service == metastore.Service( + hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") + ) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_service_flattened_error_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_service( + metastore.UpdateServiceRequest(), + service=metastore.Service( + hive_metastore_config=metastore.HiveMetastoreConfig( + version="version_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_service( + transport: str = "grpc", request_type=metastore.DeleteServiceRequest +): + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_service_from_dict(): + test_delete_service(request_type=dict) + + +def test_delete_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_service), "__call__") as call: + client.delete_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteServiceRequest() + + +@pytest.mark.asyncio +async def test_delete_service_async( + transport: str = "grpc_asyncio", request_type=metastore.DeleteServiceRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_service), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.DeleteServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_service_async_from_dict(): + await test_delete_service_async(request_type=dict) + + +def test_delete_service_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteServiceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_service), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_service_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.DeleteServiceRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_service), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_service_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_service), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_service(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == "name_value" + + +def test_delete_service_flattened_error(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_service(
+            metastore.DeleteServiceRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_service_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_service), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_service(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_service_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_service(
+            metastore.DeleteServiceRequest(), name="name_value",
+        )
+
+
+def test_list_metadata_imports(
+    transport: str = "grpc", request_type=metastore.ListMetadataImportsRequest
+):
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_imports), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.ListMetadataImportsResponse(
+            next_page_token="next_page_token_value", unreachable=["unreachable_value"],
+        )
+        response = client.list_metadata_imports(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.ListMetadataImportsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMetadataImportsPager)
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]
+
+
+def test_list_metadata_imports_from_dict():
+    test_list_metadata_imports(request_type=dict)
+
+
+def test_list_metadata_imports_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_metadata_imports), "__call__" + ) as call: + client.list_metadata_imports() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListMetadataImportsRequest() + + +@pytest.mark.asyncio +async def test_list_metadata_imports_async( + transport: str = "grpc_asyncio", request_type=metastore.ListMetadataImportsRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_imports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListMetadataImportsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_metadata_imports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ListMetadataImportsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMetadataImportsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_metadata_imports_async_from_dict(): + await test_list_metadata_imports_async(request_type=dict) + + +def test_list_metadata_imports_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListMetadataImportsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_imports), "__call__" + ) as call: + call.return_value = metastore.ListMetadataImportsResponse() + client.list_metadata_imports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_metadata_imports_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.ListMetadataImportsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_imports), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.ListMetadataImportsResponse() + ) + await client.list_metadata_imports(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_metadata_imports_flattened():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_imports), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metastore.ListMetadataImportsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_metadata_imports(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+def test_list_metadata_imports_flattened_error():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_metadata_imports(
+            metastore.ListMetadataImportsRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_imports_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_imports), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metastore.ListMetadataImportsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_metadata_imports(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_imports_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_metadata_imports(
+            metastore.ListMetadataImportsRequest(), parent="parent_value",
+        )
+
+
+def test_list_metadata_imports_pager():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_imports), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
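+        # side_effect hands back one response per underlying RPC, in order;
+        # the trailing RuntimeError is a tripwire that fails the test if the
+        # pager ever requests a page past the last one (whose next_page_token
+        # is left empty).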
+        call.side_effect = (
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[], next_page_token="def",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[metastore.MetadataImport(),], next_page_token="ghi",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_metadata_imports(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, metastore.MetadataImport) for i in results)
+
+
+def test_list_metadata_imports_pages():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_imports), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[], next_page_token="def",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[metastore.MetadataImport(),], next_page_token="ghi",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_metadata_imports(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_imports_async_pager():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_metadata_imports),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                ],
+                next_page_token="abc",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[], next_page_token="def",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[metastore.MetadataImport(),], next_page_token="ghi",
+            ),
+            metastore.ListMetadataImportsResponse(
+                metadata_imports=[
+                    metastore.MetadataImport(),
+                    metastore.MetadataImport(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_metadata_imports(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, metastore.MetadataImport) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_metadata_imports_async_pages():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
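+    # new_callable=mock.AsyncMock (below) makes the patched stub awaitable,
+    # so the async pager can await each page fetch the way it would a real
+    # grpc.aio call.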
+ with mock.patch.object( + type(client.transport.list_metadata_imports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + metastore.ListMetadataImportsResponse( + metadata_imports=[ + metastore.MetadataImport(), + metastore.MetadataImport(), + metastore.MetadataImport(), + ], + next_page_token="abc", + ), + metastore.ListMetadataImportsResponse( + metadata_imports=[], next_page_token="def", + ), + metastore.ListMetadataImportsResponse( + metadata_imports=[metastore.MetadataImport(),], next_page_token="ghi", + ), + metastore.ListMetadataImportsResponse( + metadata_imports=[ + metastore.MetadataImport(), + metastore.MetadataImport(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_metadata_imports(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_metadata_import( + transport: str = "grpc", request_type=metastore.GetMetadataImportRequest +): + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_import), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.MetadataImport( + name="name_value", + description="description_value", + state=metastore.MetadataImport.State.RUNNING, + database_dump=metastore.MetadataImport.DatabaseDump( + database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL + ), + ) + response = client.get_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetMetadataImportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.MetadataImport) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == metastore.MetadataImport.State.RUNNING + + +def test_get_metadata_import_from_dict(): + test_get_metadata_import(request_type=dict) + + +def test_get_metadata_import_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_import), "__call__" + ) as call: + client.get_metadata_import() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetMetadataImportRequest() + + +@pytest.mark.asyncio +async def test_get_metadata_import_async( + transport: str = "grpc_asyncio", request_type=metastore.GetMetadataImportRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_import), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.MetadataImport( + name="name_value", + description="description_value", + state=metastore.MetadataImport.State.RUNNING, + ) + ) + response = await client.get_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.GetMetadataImportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metastore.MetadataImport) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == metastore.MetadataImport.State.RUNNING + + +@pytest.mark.asyncio +async def test_get_metadata_import_async_from_dict(): + await test_get_metadata_import_async(request_type=dict) + + +def test_get_metadata_import_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetMetadataImportRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_import), "__call__" + ) as call: + call.return_value = metastore.MetadataImport() + client.get_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_metadata_import_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.GetMetadataImportRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_import), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + metastore.MetadataImport() + ) + await client.get_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_metadata_import_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_metadata_import), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = metastore.MetadataImport() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
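+        # Flattened keyword arguments are convenience sugar: the client is
+        # expected to copy them into the request proto, so the assertions
+        # below inspect the request object that reached the transport rather
+        # than the keyword arguments themselves.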
+        client.get_metadata_import(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+def test_get_metadata_import_flattened_error():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_metadata_import(
+            metastore.GetMetadataImportRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_import_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_metadata_import), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metastore.MetadataImport()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_metadata_import(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_import_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_metadata_import(
+            metastore.GetMetadataImportRequest(), name="name_value",
+        )
+
+
+def test_create_metadata_import(
+    transport: str = "grpc", request_type=metastore.CreateMetadataImportRequest
+):
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_metadata_import), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_metadata_import(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metastore.CreateMetadataImportRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_metadata_import_from_dict():
+    test_create_metadata_import(request_type=dict)
+
+
+def test_create_metadata_import_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
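+    # create_metadata_import is a long-running operation; for this coverage
+    # failsafe it is enough that the empty call produces the default
+    # CreateMetadataImportRequest, so the returned Operation future is never
+    # resolved.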
+ with mock.patch.object( + type(client.transport.create_metadata_import), "__call__" + ) as call: + client.create_metadata_import() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateMetadataImportRequest() + + +@pytest.mark.asyncio +async def test_create_metadata_import_async( + transport: str = "grpc_asyncio", request_type=metastore.CreateMetadataImportRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_import), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.CreateMetadataImportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_metadata_import_async_from_dict(): + await test_create_metadata_import_async(request_type=dict) + + +def test_create_metadata_import_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateMetadataImportRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_import), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_metadata_import_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.CreateMetadataImportRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_import), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
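+    # The routing header travels as a single "x-goog-request-params" metadata
+    # entry of the form "<field>=<value>", built here from the request's
+    # parent field.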
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_metadata_import_flattened():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_metadata_import), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_metadata_import(
+            parent="parent_value",
+            metadata_import=metastore.MetadataImport(
+                database_dump=metastore.MetadataImport.DatabaseDump(
+                    database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+                )
+            ),
+            metadata_import_id="metadata_import_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+        assert args[0].metadata_import == metastore.MetadataImport(
+            database_dump=metastore.MetadataImport.DatabaseDump(
+                database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+            )
+        )
+        assert args[0].metadata_import_id == "metadata_import_id_value"
+
+
+def test_create_metadata_import_flattened_error():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_metadata_import(
+            metastore.CreateMetadataImportRequest(),
+            parent="parent_value",
+            metadata_import=metastore.MetadataImport(
+                database_dump=metastore.MetadataImport.DatabaseDump(
+                    database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+                )
+            ),
+            metadata_import_id="metadata_import_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_import_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_metadata_import), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_metadata_import(
+            parent="parent_value",
+            metadata_import=metastore.MetadataImport(
+                database_dump=metastore.MetadataImport.DatabaseDump(
+                    database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+                )
+            ),
+            metadata_import_id="metadata_import_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == "parent_value" + assert args[0].metadata_import == metastore.MetadataImport( + database_dump=metastore.MetadataImport.DatabaseDump( + database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL + ) + ) + assert args[0].metadata_import_id == "metadata_import_id_value" + + +@pytest.mark.asyncio +async def test_create_metadata_import_flattened_error_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_metadata_import( + metastore.CreateMetadataImportRequest(), + parent="parent_value", + metadata_import=metastore.MetadataImport( + database_dump=metastore.MetadataImport.DatabaseDump( + database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL + ) + ), + metadata_import_id="metadata_import_id_value", + ) + + +def test_update_metadata_import( + transport: str = "grpc", request_type=metastore.UpdateMetadataImportRequest +): + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_metadata_import), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateMetadataImportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_metadata_import_from_dict(): + test_update_metadata_import(request_type=dict) + + +def test_update_metadata_import_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_metadata_import), "__call__" + ) as call: + client.update_metadata_import() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateMetadataImportRequest() + + +@pytest.mark.asyncio +async def test_update_metadata_import_async( + transport: str = "grpc_asyncio", request_type=metastore.UpdateMetadataImportRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_metadata_import), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
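+        # FakeUnaryUnaryCall wraps the Operation so that awaiting the mocked
+        # stub resolves to it, mimicking a real grpc.aio unary-unary call.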
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.UpdateMetadataImportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_metadata_import_async_from_dict(): + await test_update_metadata_import_async(request_type=dict) + + +def test_update_metadata_import_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateMetadataImportRequest() + + request.metadata_import.name = "metadata_import.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_metadata_import), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "metadata_import.name=metadata_import.name/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_metadata_import_field_headers_async(): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metastore.UpdateMetadataImportRequest() + + request.metadata_import.name = "metadata_import.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_metadata_import), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_metadata_import(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "metadata_import.name=metadata_import.name/value", + ) in kw["metadata"] + + +def test_update_metadata_import_flattened(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_metadata_import), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
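+        # update_mask is a google.protobuf.FieldMask; its paths name the
+        # MetadataImport fields the update should touch. "paths_value" is a
+        # placeholder, not a real field path.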
+        client.update_metadata_import(
+            metadata_import=metastore.MetadataImport(
+                database_dump=metastore.MetadataImport.DatabaseDump(
+                    database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].metadata_import == metastore.MetadataImport(
+            database_dump=metastore.MetadataImport.DatabaseDump(
+                database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+            )
+        )
+        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
+
+
+def test_update_metadata_import_flattened_error():
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_metadata_import(
+            metastore.UpdateMetadataImportRequest(),
+            metadata_import=metastore.MetadataImport(
+                database_dump=metastore.MetadataImport.DatabaseDump(
+                    database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_import_flattened_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_metadata_import), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_metadata_import(
+            metadata_import=metastore.MetadataImport(
+                database_dump=metastore.MetadataImport.DatabaseDump(
+                    database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+                )
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].metadata_import == metastore.MetadataImport(
+            database_dump=metastore.MetadataImport.DatabaseDump(
+                database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
+            )
+        )
+        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_import_flattened_error_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_metadata_import( + metastore.UpdateMetadataImportRequest(), + metadata_import=metastore.MetadataImport( + database_dump=metastore.MetadataImport.DatabaseDump( + database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_export_metadata( + transport: str = "grpc", request_type=metastore.ExportMetadataRequest +): + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_metadata), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ExportMetadataRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_metadata_from_dict(): + test_export_metadata(request_type=dict) + + +def test_export_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_metadata), "__call__") as call: + client.export_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ExportMetadataRequest() + + +@pytest.mark.asyncio +async def test_export_metadata_async( + transport: str = "grpc_asyncio", request_type=metastore.ExportMetadataRequest +): + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_metadata), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metastore.ExportMetadataRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_metadata_async_from_dict(): + await test_export_metadata_async(request_type=dict) + + +def test_export_metadata_field_headers(): + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
+    # Set these to a non-empty value.
+    request = metastore.ExportMetadataRequest()
+
+    request.service = "service/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.export_metadata), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.export_metadata(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "service=service/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_export_metadata_field_headers_async():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metastore.ExportMetadataRequest()
+
+    request.service = "service/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.export_metadata), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.export_metadata(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "service=service/value",) in kw["metadata"]
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.DataprocMetastoreGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DataprocMetastoreClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.DataprocMetastoreGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DataprocMetastoreClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.DataprocMetastoreGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DataprocMetastoreClient(
+            client_options={"scopes": ["1", "2"]}, transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.DataprocMetastoreGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = DataprocMetastoreClient(transport=transport)
+    assert client.transport is transport
+
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+ transport = transports.DataprocMetastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataprocMetastoreGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataprocMetastoreGrpcTransport, + transports.DataprocMetastoreGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.DataprocMetastoreGrpcTransport,) + + +def test_dataproc_metastore_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataprocMetastoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_dataproc_metastore_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.metastore_v1.services.dataproc_metastore.transports.DataprocMetastoreTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataprocMetastoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
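+    # The base transport is an abstract interface: the concrete gRPC and
+    # gRPC-asyncio transports override each of these methods, so anything
+    # left unimplemented should surface immediately as NotImplementedError.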
+ methods = ( + "list_services", + "get_service", + "create_service", + "update_service", + "delete_service", + "list_metadata_imports", + "get_metadata_import", + "create_metadata_import", + "update_metadata_import", + "export_metadata", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +@requires_google_auth_gte_1_25_0 +def test_dataproc_metastore_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.metastore_v1.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataprocMetastoreTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_dataproc_metastore_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.metastore_v1.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataprocMetastoreTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_dataproc_metastore_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.metastore_v1.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataprocMetastoreTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_dataproc_metastore_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataprocMetastoreClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_dataproc_metastore_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
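+    # google-auth releases before 1.25.0 do not accept a default_scopes
+    # argument, so this variant expects the scopes to be passed directly;
+    # compare the @requires_google_auth_gte_1_25_0 variant above.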
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataprocMetastoreClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataprocMetastoreGrpcTransport, + transports.DataprocMetastoreGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_dataproc_metastore_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataprocMetastoreGrpcTransport, + transports.DataprocMetastoreGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_dataproc_metastore_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataprocMetastoreGrpcTransport, grpc_helpers), + (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_dataproc_metastore_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "metastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="metastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataprocMetastoreGrpcTransport, grpc_helpers), + (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_dataproc_metastore_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
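+    # api-core releases before 1.26.0 do not understand default_scopes or
+    # default_host in create_channel, so the expected call below omits them
+    # and passes the resolved scopes directly.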
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "metastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataprocMetastoreGrpcTransport, grpc_helpers), + (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_dataproc_metastore_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "metastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataprocMetastoreGrpcTransport, + transports.DataprocMetastoreGrpcAsyncIOTransport, + ], +) +def test_dataproc_metastore_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
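+    # client_cert_source_callback is expected to return a (cert, key) pair of
+    # bytes, which the transport feeds to grpc.ssl_channel_credentials to
+    # build the mTLS channel credentials asserted on below.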
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_dataproc_metastore_host_no_port():
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="metastore.googleapis.com"
+        ),
+    )
+    assert client.transport._host == "metastore.googleapis.com:443"
+
+
+def test_dataproc_metastore_host_with_port():
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="metastore.googleapis.com:8000"
+        ),
+    )
+    assert client.transport._host == "metastore.googleapis.com:8000"
+
+
+def test_dataproc_metastore_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataprocMetastoreGrpcTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_dataproc_metastore_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataprocMetastoreGrpcAsyncIOTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataprocMetastoreGrpcTransport,
+        transports.DataprocMetastoreGrpcAsyncIOTransport,
+    ],
+)
+def test_dataproc_metastore_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataprocMetastoreGrpcTransport,
+        transports.DataprocMetastoreGrpcAsyncIOTransport,
+    ],
+)
+def test_dataproc_metastore_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_dataproc_metastore_grpc_lro_client():
+    client = DataprocMetastoreClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+
+    # Ensure that subsequent calls to the property return the exact same object.
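+    # The identity (`is`) check below implies the transport creates the
+    # OperationsClient once and caches it on the property.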
+    assert transport.operations_client is transport.operations_client
+
+
+def test_dataproc_metastore_grpc_lro_async_client():
+    client = DataprocMetastoreAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_metadata_import_path():
+    project = "squid"
+    location = "clam"
+    service = "whelk"
+    metadata_import = "octopus"
+    expected = "projects/{project}/locations/{location}/services/{service}/metadataImports/{metadata_import}".format(
+        project=project,
+        location=location,
+        service=service,
+        metadata_import=metadata_import,
+    )
+    actual = DataprocMetastoreClient.metadata_import_path(
+        project, location, service, metadata_import
+    )
+    assert expected == actual
+
+
+def test_parse_metadata_import_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "service": "cuttlefish",
+        "metadata_import": "mussel",
+    }
+    path = DataprocMetastoreClient.metadata_import_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataprocMetastoreClient.parse_metadata_import_path(path)
+    assert expected == actual
+
+
+def test_network_path():
+    project = "winkle"
+    network = "nautilus"
+    expected = "projects/{project}/global/networks/{network}".format(
+        project=project, network=network,
+    )
+    actual = DataprocMetastoreClient.network_path(project, network)
+    assert expected == actual
+
+
+def test_parse_network_path():
+    expected = {
+        "project": "scallop",
+        "network": "abalone",
+    }
+    path = DataprocMetastoreClient.network_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataprocMetastoreClient.parse_network_path(path)
+    assert expected == actual
+
+
+def test_service_path():
+    project = "squid"
+    location = "clam"
+    service = "whelk"
+    expected = "projects/{project}/locations/{location}/services/{service}".format(
+        project=project, location=location, service=service,
+    )
+    actual = DataprocMetastoreClient.service_path(project, location, service)
+    assert expected == actual
+
+
+def test_parse_service_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "service": "nudibranch",
+    }
+    path = DataprocMetastoreClient.service_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DataprocMetastoreClient.parse_service_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "cuttlefish"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = DataprocMetastoreClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "mussel",
+    }
+    path = DataprocMetastoreClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = DataprocMetastoreClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder,) + actual = DataprocMetastoreClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = DataprocMetastoreClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataprocMetastoreClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization,) + actual = DataprocMetastoreClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = DataprocMetastoreClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataprocMetastoreClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project,) + actual = DataprocMetastoreClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = DataprocMetastoreClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataprocMetastoreClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = DataprocMetastoreClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = DataprocMetastoreClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataprocMetastoreClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataprocMetastoreTransport, "_prep_wrapped_messages" + ) as prep: + client = DataprocMetastoreClient( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataprocMetastoreTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataprocMetastoreClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/metastore_v1alpha/__init__.py b/tests/unit/gapic/metastore_v1alpha/__init__.py index 42ffdf2..4de6597 100644 --- a/tests/unit/gapic/metastore_v1alpha/__init__.py +++ b/tests/unit/gapic/metastore_v1alpha/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py b/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py index a3309e1..0000398 100644 --- a/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py +++ b/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.metastore_v1alpha.services.dataproc_metastore import ( DataprocMetastoreAsyncClient, @@ -43,13 +42,43 @@ ) from google.cloud.metastore_v1alpha.services.dataproc_metastore import pagers from google.cloud.metastore_v1alpha.services.dataproc_metastore import transports +from google.cloud.metastore_v1alpha.services.dataproc_metastore.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.metastore_v1alpha.services.dataproc_metastore.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.metastore_v1alpha.types import metastore from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore -from google.type import dayofweek_pb2 as dayofweek # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import 
timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -101,7 +130,7 @@ def test__get_default_mtls_endpoint(): "client_class", [DataprocMetastoreClient, DataprocMetastoreAsyncClient,] ) def test_dataproc_metastore_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -118,7 +147,7 @@ def test_dataproc_metastore_client_from_service_account_info(client_class): "client_class", [DataprocMetastoreClient, DataprocMetastoreAsyncClient,] ) def test_dataproc_metastore_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -171,7 +200,7 @@ def test_dataproc_metastore_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(DataprocMetastoreClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -469,7 +498,7 @@ def test_list_services( transport: str = "grpc", request_type=metastore.ListServicesRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -482,21 +511,16 @@ def test_list_services( call.return_value = metastore.ListServicesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_services(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListServicesRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListServicesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -508,7 +532,7 @@ def test_list_services_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -516,7 +540,6 @@ def test_list_services_empty_call(): client.list_services() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListServicesRequest() @@ -525,7 +548,7 @@ async def test_list_services_async( transport: str = "grpc_asyncio", request_type=metastore.ListServicesRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -541,20 +564,16 @@ async def test_list_services_async( unreachable=["unreachable_value"], ) ) - response = await client.list_services(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListServicesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListServicesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -564,17 +583,17 @@ async def test_list_services_async_from_dict(): def test_list_services_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListServicesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_services), "__call__") as call: call.return_value = metastore.ListServicesResponse() - client.list_services(request) # Establish that the underlying gRPC stub method was called. @@ -590,12 +609,13 @@ def test_list_services_field_headers(): @pytest.mark.asyncio async def test_list_services_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListServicesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -603,7 +623,6 @@ async def test_list_services_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metastore.ListServicesResponse() ) - await client.list_services(request) # Establish that the underlying gRPC stub method was called. 
@@ -617,13 +636,12 @@ async def test_list_services_field_headers_async(): def test_list_services_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_services), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metastore.ListServicesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_services(parent="parent_value",) @@ -632,12 +650,11 @@ def test_list_services_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_services_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -650,7 +667,7 @@ def test_list_services_flattened_error(): @pytest.mark.asyncio async def test_list_services_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -669,14 +686,13 @@ async def test_list_services_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_services_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -688,7 +704,7 @@ async def test_list_services_flattened_error_async(): def test_list_services_pager(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_services), "__call__") as call: @@ -726,7 +742,7 @@ def test_list_services_pager(): def test_list_services_pages(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_services), "__call__") as call: @@ -756,7 +772,9 @@ def test_list_services_pages(): @pytest.mark.asyncio async def test_list_services_async_pager(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -793,7 +811,9 @@ async def test_list_services_async_pager(): @pytest.mark.asyncio async def test_list_services_async_pages(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -827,7 +847,7 @@ async def test_list_services_async_pages(): def test_get_service(transport: str = "grpc", request_type=metastore.GetServiceRequest): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -852,37 +872,24 @@ def test_get_service(transport: str = "grpc", request_type=metastore.GetServiceR version="version_value" ), ) - response = client.get_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetServiceRequest() # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Service) - assert response.name == "name_value" - assert response.network == "network_value" - assert response.endpoint_uri == "endpoint_uri_value" - assert response.port == 453 - assert response.state == metastore.Service.State.CREATING - assert response.state_message == "state_message_value" - assert response.artifact_gcs_uri == "artifact_gcs_uri_value" - assert response.tier == metastore.Service.Tier.DEVELOPER - assert response.uid == "uid_value" - assert response.release_channel == metastore.Service.ReleaseChannel.CANARY @@ -894,7 +901,7 @@ def test_get_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -902,7 +909,6 @@ def test_get_service_empty_call(): client.get_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetServiceRequest() @@ -911,7 +917,7 @@ async def test_get_service_async( transport: str = "grpc_asyncio", request_type=metastore.GetServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -935,36 +941,24 @@ async def test_get_service_async( release_channel=metastore.Service.ReleaseChannel.CANARY, ) ) - response = await client.get_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetServiceRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, metastore.Service) - assert response.name == "name_value" - assert response.network == "network_value" - assert response.endpoint_uri == "endpoint_uri_value" - assert response.port == 453 - assert response.state == metastore.Service.State.CREATING - assert response.state_message == "state_message_value" - assert response.artifact_gcs_uri == "artifact_gcs_uri_value" - assert response.tier == metastore.Service.Tier.DEVELOPER - assert response.uid == "uid_value" - assert response.release_channel == metastore.Service.ReleaseChannel.CANARY @@ -974,17 +968,17 @@ async def test_get_service_async_from_dict(): def test_get_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetServiceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_service), "__call__") as call: call.return_value = metastore.Service() - client.get_service(request) # Establish that the underlying gRPC stub method was called. @@ -1000,18 +994,18 @@ def test_get_service_field_headers(): @pytest.mark.asyncio async def test_get_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetServiceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_service), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Service()) - await client.get_service(request) # Establish that the underlying gRPC stub method was called. @@ -1025,13 +1019,12 @@ async def test_get_service_field_headers_async(): def test_get_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metastore.Service() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_service(name="name_value",) @@ -1040,12 +1033,11 @@ def test_get_service_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1058,7 +1050,7 @@ def test_get_service_flattened_error(): @pytest.mark.asyncio async def test_get_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1075,14 +1067,13 @@ async def test_get_service_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1097,7 +1088,7 @@ def test_create_service( transport: str = "grpc", request_type=metastore.CreateServiceRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1108,13 +1099,11 @@ def test_create_service( with mock.patch.object(type(client.transport.create_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateServiceRequest() # Establish that the response is the type that we expect. @@ -1129,7 +1118,7 @@ def test_create_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1137,7 +1126,6 @@ def test_create_service_empty_call(): client.create_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateServiceRequest() @@ -1146,7 +1134,7 @@ async def test_create_service_async( transport: str = "grpc_asyncio", request_type=metastore.CreateServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1159,13 +1147,11 @@ async def test_create_service_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateServiceRequest() # Establish that the response is the type that we expect. 
@@ -1178,17 +1164,17 @@ async def test_create_service_async_from_dict(): def test_create_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateServiceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_service), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_service(request) # Establish that the underlying gRPC stub method was called. @@ -1204,12 +1190,13 @@ def test_create_service_field_headers(): @pytest.mark.asyncio async def test_create_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateServiceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1217,7 +1204,6 @@ async def test_create_service_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_service(request) # Establish that the underlying gRPC stub method was called. @@ -1231,13 +1217,12 @@ async def test_create_service_field_headers_async(): def test_create_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_service( @@ -1254,18 +1239,15 @@ def test_create_service_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].service == metastore.Service( hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") ) - assert args[0].service_id == "service_id_value" def test_create_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1285,7 +1267,7 @@ def test_create_service_flattened_error(): @pytest.mark.asyncio async def test_create_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1312,20 +1294,17 @@ async def test_create_service_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].service == metastore.Service( hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") ) - assert args[0].service_id == "service_id_value" @pytest.mark.asyncio async def test_create_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1347,7 +1326,7 @@ def test_update_service( transport: str = "grpc", request_type=metastore.UpdateServiceRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1358,13 +1337,11 @@ def test_update_service( with mock.patch.object(type(client.transport.update_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateServiceRequest() # Establish that the response is the type that we expect. @@ -1379,7 +1356,7 @@ def test_update_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1387,7 +1364,6 @@ def test_update_service_empty_call(): client.update_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateServiceRequest() @@ -1396,7 +1372,7 @@ async def test_update_service_async( transport: str = "grpc_asyncio", request_type=metastore.UpdateServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1409,13 +1385,11 @@ async def test_update_service_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateServiceRequest() # Establish that the response is the type that we expect. @@ -1428,17 +1402,17 @@ async def test_update_service_async_from_dict(): def test_update_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.UpdateServiceRequest() + request.service.name = "service.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_service), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_service(request) # Establish that the underlying gRPC stub method was called. @@ -1456,12 +1430,13 @@ def test_update_service_field_headers(): @pytest.mark.asyncio async def test_update_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.UpdateServiceRequest() + request.service.name = "service.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1469,7 +1444,6 @@ async def test_update_service_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_service(request) # Establish that the underlying gRPC stub method was called. @@ -1485,13 +1459,12 @@ async def test_update_service_field_headers_async(): def test_update_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_service( @@ -1500,23 +1473,21 @@ def test_update_service_flattened(): version="version_value" ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].service == metastore.Service( hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1528,14 +1499,14 @@ def test_update_service_flattened_error(): version="version_value" ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1554,25 +1525,23 @@ async def test_update_service_flattened_async(): version="version_value" ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].service == metastore.Service( hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1585,7 +1554,7 @@ async def test_update_service_flattened_error_async(): version="version_value" ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1593,7 +1562,7 @@ def test_delete_service( transport: str = "grpc", request_type=metastore.DeleteServiceRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1604,13 +1573,11 @@ def test_delete_service( with mock.patch.object(type(client.transport.delete_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteServiceRequest() # Establish that the response is the type that we expect. @@ -1625,7 +1592,7 @@ def test_delete_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1633,7 +1600,6 @@ def test_delete_service_empty_call(): client.delete_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteServiceRequest() @@ -1642,7 +1608,7 @@ async def test_delete_service_async( transport: str = "grpc_asyncio", request_type=metastore.DeleteServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1655,13 +1621,11 @@ async def test_delete_service_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteServiceRequest() # Establish that the response is the type that we expect. 
@@ -1674,17 +1638,17 @@ async def test_delete_service_async_from_dict(): def test_delete_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.DeleteServiceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_service), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_service(request) # Establish that the underlying gRPC stub method was called. @@ -1700,12 +1664,13 @@ def test_delete_service_field_headers(): @pytest.mark.asyncio async def test_delete_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.DeleteServiceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1713,7 +1678,6 @@ async def test_delete_service_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_service(request) # Establish that the underlying gRPC stub method was called. @@ -1727,13 +1691,12 @@ async def test_delete_service_field_headers_async(): def test_delete_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_service(name="name_value",) @@ -1742,12 +1705,11 @@ def test_delete_service_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1760,7 +1722,7 @@ def test_delete_service_flattened_error(): @pytest.mark.asyncio async def test_delete_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1779,14 +1741,13 @@ async def test_delete_service_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1801,7 +1762,7 @@ def test_list_metadata_imports( transport: str = "grpc", request_type=metastore.ListMetadataImportsRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1816,21 +1777,16 @@ def test_list_metadata_imports( call.return_value = metastore.ListMetadataImportsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListMetadataImportsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMetadataImportsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -1842,7 +1798,7 @@ def test_list_metadata_imports_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1852,7 +1808,6 @@ def test_list_metadata_imports_empty_call(): client.list_metadata_imports() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListMetadataImportsRequest() @@ -1861,7 +1816,7 @@ async def test_list_metadata_imports_async( transport: str = "grpc_asyncio", request_type=metastore.ListMetadataImportsRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1879,20 +1834,16 @@ async def test_list_metadata_imports_async( unreachable=["unreachable_value"], ) ) - response = await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListMetadataImportsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMetadataImportsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -1902,11 +1853,12 @@ async def test_list_metadata_imports_async_from_dict(): def test_list_metadata_imports_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = metastore.ListMetadataImportsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1914,7 +1866,6 @@ def test_list_metadata_imports_field_headers(): type(client.transport.list_metadata_imports), "__call__" ) as call: call.return_value = metastore.ListMetadataImportsResponse() - client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. @@ -1930,12 +1881,13 @@ def test_list_metadata_imports_field_headers(): @pytest.mark.asyncio async def test_list_metadata_imports_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListMetadataImportsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1945,7 +1897,6 @@ async def test_list_metadata_imports_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metastore.ListMetadataImportsResponse() ) - await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. @@ -1959,7 +1910,7 @@ async def test_list_metadata_imports_field_headers_async(): def test_list_metadata_imports_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1967,7 +1918,6 @@ def test_list_metadata_imports_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = metastore.ListMetadataImportsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_metadata_imports(parent="parent_value",) @@ -1976,12 +1926,11 @@ def test_list_metadata_imports_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_metadata_imports_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1994,7 +1943,7 @@ def test_list_metadata_imports_flattened_error(): @pytest.mark.asyncio async def test_list_metadata_imports_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2015,14 +1964,13 @@ async def test_list_metadata_imports_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_metadata_imports_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2034,7 +1982,7 @@ async def test_list_metadata_imports_flattened_error_async(): def test_list_metadata_imports_pager(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2079,7 +2027,7 @@ def test_list_metadata_imports_pager(): def test_list_metadata_imports_pages(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2116,7 +2064,9 @@ def test_list_metadata_imports_pages(): @pytest.mark.asyncio async def test_list_metadata_imports_async_pager(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2160,7 +2110,9 @@ async def test_list_metadata_imports_async_pager(): @pytest.mark.asyncio async def test_list_metadata_imports_async_pages(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2203,7 +2155,7 @@ def test_get_metadata_import( transport: str = "grpc", request_type=metastore.GetMetadataImportRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2223,23 +2175,17 @@ def test_get_metadata_import( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ), ) - response = client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetMetadataImportRequest() # Establish that the response is the type that we expect. - assert isinstance(response, metastore.MetadataImport) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == metastore.MetadataImport.State.RUNNING @@ -2251,7 +2197,7 @@ def test_get_metadata_import_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2261,7 +2207,6 @@ def test_get_metadata_import_empty_call(): client.get_metadata_import() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetMetadataImportRequest() @@ -2270,7 +2215,7 @@ async def test_get_metadata_import_async( transport: str = "grpc_asyncio", request_type=metastore.GetMetadataImportRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2289,22 +2234,17 @@ async def test_get_metadata_import_async( state=metastore.MetadataImport.State.RUNNING, ) ) - response = await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetMetadataImportRequest() # Establish that the response is the type that we expect. assert isinstance(response, metastore.MetadataImport) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == metastore.MetadataImport.State.RUNNING @@ -2314,11 +2254,12 @@ async def test_get_metadata_import_async_from_dict(): def test_get_metadata_import_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetMetadataImportRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2326,7 +2267,6 @@ def test_get_metadata_import_field_headers(): type(client.transport.get_metadata_import), "__call__" ) as call: call.return_value = metastore.MetadataImport() - client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2342,12 +2282,13 @@ def test_get_metadata_import_field_headers(): @pytest.mark.asyncio async def test_get_metadata_import_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetMetadataImportRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2357,7 +2298,6 @@ async def test_get_metadata_import_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metastore.MetadataImport() ) - await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2371,7 +2311,7 @@ async def test_get_metadata_import_field_headers_async(): def test_get_metadata_import_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2379,7 +2319,6 @@ def test_get_metadata_import_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = metastore.MetadataImport() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_metadata_import(name="name_value",) @@ -2388,12 +2327,11 @@ def test_get_metadata_import_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_metadata_import_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2406,7 +2344,7 @@ def test_get_metadata_import_flattened_error(): @pytest.mark.asyncio async def test_get_metadata_import_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2427,14 +2365,13 @@ async def test_get_metadata_import_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_metadata_import_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2449,7 +2386,7 @@ def test_create_metadata_import( transport: str = "grpc", request_type=metastore.CreateMetadataImportRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2462,13 +2399,11 @@ def test_create_metadata_import( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateMetadataImportRequest() # Establish that the response is the type that we expect. @@ -2483,7 +2418,7 @@ def test_create_metadata_import_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
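# What the *_flattened_error tests above pin down, seen from the caller's
# side: a GAPIC method accepts either a request object or flattened keyword
# fields, never both. A hedged sketch of the rejected form (annotation, not
# patch content); the ValueError is raised before any RPC is attempted.
import pytest

from google.auth import credentials as ga_credentials
from google.cloud import metastore_v1
from google.cloud.metastore_v1.types import metastore

client = metastore_v1.DataprocMetastoreClient(
    credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
    client.get_metadata_import(
        metastore.GetMetadataImportRequest(), name="name_value",
    )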
@@ -2493,7 +2428,6 @@ def test_create_metadata_import_empty_call(): client.create_metadata_import() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateMetadataImportRequest() @@ -2502,7 +2436,7 @@ async def test_create_metadata_import_async( transport: str = "grpc_asyncio", request_type=metastore.CreateMetadataImportRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2517,13 +2451,11 @@ async def test_create_metadata_import_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateMetadataImportRequest() # Establish that the response is the type that we expect. @@ -2536,11 +2468,12 @@ async def test_create_metadata_import_async_from_dict(): def test_create_metadata_import_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateMetadataImportRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2548,7 +2481,6 @@ def test_create_metadata_import_field_headers(): type(client.transport.create_metadata_import), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2564,12 +2496,13 @@ def test_create_metadata_import_field_headers(): @pytest.mark.asyncio async def test_create_metadata_import_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateMetadataImportRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2579,7 +2512,6 @@ async def test_create_metadata_import_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2593,7 +2525,7 @@ async def test_create_metadata_import_field_headers_async(): def test_create_metadata_import_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2601,7 +2533,6 @@ def test_create_metadata_import_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_metadata_import( @@ -2618,20 +2549,17 @@ def test_create_metadata_import_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metadata_import == metastore.MetadataImport( database_dump=metastore.MetadataImport.DatabaseDump( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ) - assert args[0].metadata_import_id == "metadata_import_id_value" def test_create_metadata_import_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2651,7 +2579,7 @@ def test_create_metadata_import_flattened_error(): @pytest.mark.asyncio async def test_create_metadata_import_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2680,22 +2608,19 @@ async def test_create_metadata_import_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metadata_import == metastore.MetadataImport( database_dump=metastore.MetadataImport.DatabaseDump( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ) - assert args[0].metadata_import_id == "metadata_import_id_value" @pytest.mark.asyncio async def test_create_metadata_import_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2717,7 +2642,7 @@ def test_update_metadata_import( transport: str = "grpc", request_type=metastore.UpdateMetadataImportRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2730,13 +2655,11 @@ def test_update_metadata_import( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateMetadataImportRequest() # Establish that the response is the type that we expect. @@ -2751,7 +2674,7 @@ def test_update_metadata_import_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
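# The flattened create_* assertions above compare against a nested proto-plus
# message. A hedged sketch (annotation, not patch content) of how those types
# compose, using only the types module; no client or network is needed:
from google.cloud.metastore_v1.types import metastore

metadata_import = metastore.MetadataImport(
    database_dump=metastore.MetadataImport.DatabaseDump(
        database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
    )
)
assert (
    metadata_import.database_dump.database_type
    == metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL
)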
@@ -2761,7 +2684,6 @@ def test_update_metadata_import_empty_call(): client.update_metadata_import() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateMetadataImportRequest() @@ -2770,7 +2692,7 @@ async def test_update_metadata_import_async( transport: str = "grpc_asyncio", request_type=metastore.UpdateMetadataImportRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2785,13 +2707,11 @@ async def test_update_metadata_import_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateMetadataImportRequest() # Establish that the response is the type that we expect. @@ -2804,11 +2724,12 @@ async def test_update_metadata_import_async_from_dict(): def test_update_metadata_import_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.UpdateMetadataImportRequest() + request.metadata_import.name = "metadata_import.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2816,7 +2737,6 @@ def test_update_metadata_import_field_headers(): type(client.transport.update_metadata_import), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2835,12 +2755,13 @@ def test_update_metadata_import_field_headers(): @pytest.mark.asyncio async def test_update_metadata_import_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.UpdateMetadataImportRequest() + request.metadata_import.name = "metadata_import.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2850,7 +2771,6 @@ async def test_update_metadata_import_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2867,7 +2787,7 @@ async def test_update_metadata_import_field_headers_async(): def test_update_metadata_import_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2875,7 +2795,6 @@ def test_update_metadata_import_flattened(): ) as call: # Designate an appropriate return value for the call. 
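# For update RPCs the routing value comes from a nested request field, so the
# expected "x-goog-request-params" entry uses a dotted key. A hedged sketch
# (annotation, not patch content) of the assertion the field-headers tests
# above build toward; the assert line itself sits outside these hunks, and
# the standard GAPIC routing-header convention is assumed:
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import metastore_v1
from google.cloud.metastore_v1.types import metastore
from google.longrunning import operations_pb2

client = metastore_v1.DataprocMetastoreClient(
    credentials=ga_credentials.AnonymousCredentials(),
)
request = metastore.UpdateMetadataImportRequest()
request.metadata_import.name = "metadata_import.name/value"

with mock.patch.object(
    type(client.transport.update_metadata_import), "__call__"
) as call:
    call.return_value = operations_pb2.Operation(name="operations/op")
    client.update_metadata_import(request=request)

_, _, kw = call.mock_calls[0]
assert (
    "x-goog-request-params",
    "metadata_import.name=metadata_import.name/value",
) in kw["metadata"]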
call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_metadata_import( @@ -2884,25 +2803,23 @@ def test_update_metadata_import_flattened(): database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metadata_import == metastore.MetadataImport( database_dump=metastore.MetadataImport.DatabaseDump( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_metadata_import_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2914,14 +2831,14 @@ def test_update_metadata_import_flattened_error(): database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_metadata_import_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2942,27 +2859,25 @@ async def test_update_metadata_import_flattened_async(): database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
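# The update_mask churn above is purely an import rename: FieldMask is the
# protobuf well-known type, and the tests now spell its module by its
# canonical name, field_mask_pb2, instead of the old field_mask alias.
# Quick sketch (annotation, not patch content):
from google.protobuf import field_mask_pb2

mask = field_mask_pb2.FieldMask(paths=["paths_value"])
assert list(mask.paths) == ["paths_value"]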
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metadata_import == metastore.MetadataImport( database_dump=metastore.MetadataImport.DatabaseDump( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_metadata_import_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2975,7 +2890,7 @@ async def test_update_metadata_import_flattened_error_async(): database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2983,7 +2898,7 @@ def test_export_metadata( transport: str = "grpc", request_type=metastore.ExportMetadataRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2994,13 +2909,11 @@ def test_export_metadata( with mock.patch.object(type(client.transport.export_metadata), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.export_metadata(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ExportMetadataRequest() # Establish that the response is the type that we expect. @@ -3015,7 +2928,7 @@ def test_export_metadata_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3023,7 +2936,6 @@ def test_export_metadata_empty_call(): client.export_metadata() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ExportMetadataRequest() @@ -3032,7 +2944,7 @@ async def test_export_metadata_async( transport: str = "grpc_asyncio", request_type=metastore.ExportMetadataRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3045,13 +2957,11 @@ async def test_export_metadata_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ExportMetadataRequest() # Establish that the response is the type that we expect. 
@@ -3064,17 +2974,17 @@ async def test_export_metadata_async_from_dict(): def test_export_metadata_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ExportMetadataRequest() + request.service = "service/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_metadata), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.export_metadata(request) # Establish that the underlying gRPC stub method was called. @@ -3090,12 +3000,13 @@ def test_export_metadata_field_headers(): @pytest.mark.asyncio async def test_export_metadata_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ExportMetadataRequest() + request.service = "service/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3103,7 +3014,6 @@ async def test_export_metadata_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. @@ -3120,7 +3030,7 @@ def test_restore_service( transport: str = "grpc", request_type=metastore.RestoreServiceRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3131,13 +3041,11 @@ def test_restore_service( with mock.patch.object(type(client.transport.restore_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.restore_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.RestoreServiceRequest() # Establish that the response is the type that we expect. @@ -3152,7 +3060,7 @@ def test_restore_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
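# export_metadata and restore_service are long-running RPCs: the mocked stub
# returns a raw longrunning Operation, and the client hands back an api_core
# future, which is what the "response is the type that we expect" assertions
# elided from these hunks check. A hedged sketch (annotation, not patch
# content):
from unittest import mock

from google.api_core import operation
from google.auth import credentials as ga_credentials
from google.cloud import metastore_v1
from google.cloud.metastore_v1.types import metastore
from google.longrunning import operations_pb2

client = metastore_v1.DataprocMetastoreClient(
    credentials=ga_credentials.AnonymousCredentials(),
)
with mock.patch.object(type(client.transport.export_metadata), "__call__") as call:
    call.return_value = operations_pb2.Operation(name="operations/spam")
    response = client.export_metadata(
        request=metastore.ExportMetadataRequest(service="service_value")
    )

assert isinstance(response, operation.Operation)  # a future, not the result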
@@ -3160,7 +3068,6 @@ def test_restore_service_empty_call(): client.restore_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.RestoreServiceRequest() @@ -3169,7 +3076,7 @@ async def test_restore_service_async( transport: str = "grpc_asyncio", request_type=metastore.RestoreServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3182,13 +3089,11 @@ async def test_restore_service_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.restore_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.RestoreServiceRequest() # Establish that the response is the type that we expect. @@ -3201,17 +3106,17 @@ async def test_restore_service_async_from_dict(): def test_restore_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.RestoreServiceRequest() + request.service = "service/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.restore_service), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_service(request) # Establish that the underlying gRPC stub method was called. @@ -3227,12 +3132,13 @@ def test_restore_service_field_headers(): @pytest.mark.asyncio async def test_restore_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.RestoreServiceRequest() + request.service = "service/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3240,7 +3146,6 @@ async def test_restore_service_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.restore_service(request) # Establish that the underlying gRPC stub method was called. @@ -3254,13 +3159,12 @@ async def test_restore_service_field_headers_async(): def test_restore_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.restore_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.restore_service( @@ -3271,14 +3175,12 @@ def test_restore_service_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].service == "service_value" - assert args[0].backup == "backup_value" def test_restore_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3293,7 +3195,7 @@ def test_restore_service_flattened_error(): @pytest.mark.asyncio async def test_restore_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3314,16 +3216,14 @@ async def test_restore_service_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].service == "service_value" - assert args[0].backup == "backup_value" @pytest.mark.asyncio async def test_restore_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3340,7 +3240,7 @@ def test_list_backups( transport: str = "grpc", request_type=metastore.ListBackupsRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3353,21 +3253,16 @@ def test_list_backups( call.return_value = metastore.ListBackupsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListBackupsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -3379,7 +3274,7 @@ def test_list_backups_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3387,7 +3282,6 @@ def test_list_backups_empty_call(): client.list_backups() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListBackupsRequest() @@ -3396,7 +3290,7 @@ async def test_list_backups_async( transport: str = "grpc_asyncio", request_type=metastore.ListBackupsRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3412,20 +3306,16 @@ async def test_list_backups_async( unreachable=["unreachable_value"], ) ) - response = await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListBackupsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBackupsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -3435,17 +3325,17 @@ async def test_list_backups_async_from_dict(): def test_list_backups_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListBackupsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: call.return_value = metastore.ListBackupsResponse() - client.list_backups(request) # Establish that the underlying gRPC stub method was called. @@ -3461,12 +3351,13 @@ def test_list_backups_field_headers(): @pytest.mark.asyncio async def test_list_backups_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListBackupsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3474,7 +3365,6 @@ async def test_list_backups_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metastore.ListBackupsResponse() ) - await client.list_backups(request) # Establish that the underlying gRPC stub method was called. @@ -3488,13 +3378,12 @@ async def test_list_backups_field_headers_async(): def test_list_backups_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metastore.ListBackupsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_backups(parent="parent_value",) @@ -3503,12 +3392,11 @@ def test_list_backups_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_backups_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3521,7 +3409,7 @@ def test_list_backups_flattened_error(): @pytest.mark.asyncio async def test_list_backups_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3540,14 +3428,13 @@ async def test_list_backups_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_backups_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3559,7 +3446,7 @@ async def test_list_backups_flattened_error_async(): def test_list_backups_pager(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: @@ -3593,7 +3480,7 @@ def test_list_backups_pager(): def test_list_backups_pages(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: @@ -3619,7 +3506,9 @@ def test_list_backups_pages(): @pytest.mark.asyncio async def test_list_backups_async_pager(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3652,7 +3541,9 @@ async def test_list_backups_async_pager(): @pytest.mark.asyncio async def test_list_backups_async_pages(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3682,7 +3573,7 @@ async def test_list_backups_async_pages(): def test_get_backup(transport: str = "grpc", request_type=metastore.GetBackupRequest): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3697,23 +3588,17 @@ def test_get_backup(transport: str = "grpc", request_type=metastore.GetBackupReq state=metastore.Backup.State.CREATING, description="description_value", ) - response = client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetBackupRequest() # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Backup) - assert response.name == "name_value" - assert response.state == metastore.Backup.State.CREATING - assert response.description == "description_value" @@ -3725,7 +3610,7 @@ def test_get_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
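# How the pager tests above drive pagination: the stub yields successive
# ListBackupsResponse pages via side_effect, and the pager stitches them into
# one iterable. (Note the pager tests pass the AnonymousCredentials class
# itself, without parentheses; an instance works just as well.) A hedged
# sketch (annotation, not patch content) with two fake pages:
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import metastore_v1
from google.cloud.metastore_v1.types import metastore

client = metastore_v1.DataprocMetastoreClient(
    credentials=ga_credentials.AnonymousCredentials(),
)
with mock.patch.object(type(client.transport.list_backups), "__call__") as call:
    call.side_effect = (
        metastore.ListBackupsResponse(
            backups=[metastore.Backup(), metastore.Backup()],
            next_page_token="abc",
        ),
        metastore.ListBackupsResponse(backups=[metastore.Backup()]),
    )
    # Consume inside the patch: iterating triggers the second page fetch.
    results = list(client.list_backups(parent="parent_value"))

assert len(results) == 3  # both pages flattened into one sequence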
@@ -3733,7 +3618,6 @@ def test_get_backup_empty_call(): client.get_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetBackupRequest() @@ -3742,7 +3626,7 @@ async def test_get_backup_async( transport: str = "grpc_asyncio", request_type=metastore.GetBackupRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3759,22 +3643,17 @@ async def test_get_backup_async( description="description_value", ) ) - response = await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetBackupRequest() # Establish that the response is the type that we expect. assert isinstance(response, metastore.Backup) - assert response.name == "name_value" - assert response.state == metastore.Backup.State.CREATING - assert response.description == "description_value" @@ -3784,17 +3663,17 @@ async def test_get_backup_async_from_dict(): def test_get_backup_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetBackupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_backup), "__call__") as call: call.return_value = metastore.Backup() - client.get_backup(request) # Establish that the underlying gRPC stub method was called. @@ -3810,18 +3689,18 @@ def test_get_backup_field_headers(): @pytest.mark.asyncio async def test_get_backup_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetBackupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Backup()) - await client.get_backup(request) # Establish that the underlying gRPC stub method was called. @@ -3835,13 +3714,12 @@ async def test_get_backup_field_headers_async(): def test_get_backup_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metastore.Backup() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_backup(name="name_value",) @@ -3850,12 +3728,11 @@ def test_get_backup_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_backup_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3868,7 +3745,7 @@ def test_get_backup_flattened_error(): @pytest.mark.asyncio async def test_get_backup_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3885,14 +3762,13 @@ async def test_get_backup_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_backup_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3907,7 +3783,7 @@ def test_create_backup( transport: str = "grpc", request_type=metastore.CreateBackupRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3918,13 +3794,11 @@ def test_create_backup( with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateBackupRequest() # Establish that the response is the type that we expect. @@ -3939,7 +3813,7 @@ def test_create_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3947,7 +3821,6 @@ def test_create_backup_empty_call(): client.create_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateBackupRequest() @@ -3956,7 +3829,7 @@ async def test_create_backup_async( transport: str = "grpc_asyncio", request_type=metastore.CreateBackupRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3969,13 +3842,11 @@ async def test_create_backup_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_backup(request) # Establish that the underlying gRPC stub method was called. 
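# The *_empty_call tests above are a coverage failsafe: invoking a method
# with no request object and no flattened fields must still send the default
# request proto. A hedged sketch (annotation, not patch content) of that
# contract:
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import metastore_v1
from google.cloud.metastore_v1.types import metastore
from google.longrunning import operations_pb2

client = metastore_v1.DataprocMetastoreClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
with mock.patch.object(type(client.transport.create_backup), "__call__") as call:
    call.return_value = operations_pb2.Operation(name="operations/spam")
    client.create_backup()

_, args, _ = call.mock_calls[0]
assert args[0] == metastore.CreateBackupRequest()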
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateBackupRequest() # Establish that the response is the type that we expect. @@ -3988,17 +3859,17 @@ async def test_create_backup_async_from_dict(): def test_create_backup_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateBackupRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup(request) # Establish that the underlying gRPC stub method was called. @@ -4014,12 +3885,13 @@ def test_create_backup_field_headers(): @pytest.mark.asyncio async def test_create_backup_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateBackupRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4027,7 +3899,6 @@ async def test_create_backup_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_backup(request) # Establish that the underlying gRPC stub method was called. @@ -4041,13 +3912,12 @@ async def test_create_backup_field_headers_async(): def test_create_backup_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_backup( @@ -4060,16 +3930,13 @@ def test_create_backup_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].backup == metastore.Backup(name="name_value") - assert args[0].backup_id == "backup_id_value" def test_create_backup_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4085,7 +3952,7 @@ def test_create_backup_flattened_error(): @pytest.mark.asyncio async def test_create_backup_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4108,18 +3975,15 @@ async def test_create_backup_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].backup == metastore.Backup(name="name_value") - assert args[0].backup_id == "backup_id_value" @pytest.mark.asyncio async def test_create_backup_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4137,7 +4001,7 @@ def test_delete_backup( transport: str = "grpc", request_type=metastore.DeleteBackupRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4148,13 +4012,11 @@ def test_delete_backup( with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteBackupRequest() # Establish that the response is the type that we expect. @@ -4169,7 +4031,7 @@ def test_delete_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4177,7 +4039,6 @@ def test_delete_backup_empty_call(): client.delete_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteBackupRequest() @@ -4186,7 +4047,7 @@ async def test_delete_backup_async( transport: str = "grpc_asyncio", request_type=metastore.DeleteBackupRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4199,13 +4060,11 @@ async def test_delete_backup_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteBackupRequest() # Establish that the response is the type that we expect. @@ -4218,17 +4077,17 @@ async def test_delete_backup_async_from_dict(): def test_delete_backup_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.DeleteBackupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup(request) # Establish that the underlying gRPC stub method was called. @@ -4244,12 +4103,13 @@ def test_delete_backup_field_headers(): @pytest.mark.asyncio async def test_delete_backup_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.DeleteBackupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4257,7 +4117,6 @@ async def test_delete_backup_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. @@ -4271,13 +4130,12 @@ async def test_delete_backup_field_headers_async(): def test_delete_backup_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_backup(name="name_value",) @@ -4286,12 +4144,11 @@ def test_delete_backup_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_backup_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4304,7 +4161,7 @@ def test_delete_backup_flattened_error(): @pytest.mark.asyncio async def test_delete_backup_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4323,14 +4180,13 @@ async def test_delete_backup_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_backup_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4344,16 +4200,16 @@ async def test_delete_backup_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.DataprocMetastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.DataprocMetastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataprocMetastoreClient( @@ -4363,7 +4219,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.DataprocMetastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DataprocMetastoreClient( @@ -4374,7 +4230,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.DataprocMetastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = DataprocMetastoreClient(transport=transport) assert client.transport is transport @@ -4383,13 +4239,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.DataprocMetastoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.DataprocMetastoreGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -4404,23 +4260,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
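# The transport plumbing tests above encode construction rules rather than
# RPC behavior: a ready-made transport already carries credentials, so
# combining it with credentials (or a credentials file, or scopes) is
# ambiguous and rejected. A hedged sketch (annotation, not patch content) of
# both sides of that rule:
import pytest

from google.auth import credentials as ga_credentials
from google.cloud import metastore_v1
from google.cloud.metastore_v1.services.dataproc_metastore import transports

transport = transports.DataprocMetastoreGrpcTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
client = metastore_v1.DataprocMetastoreClient(transport=transport)  # fine
assert client.transport is transport

with pytest.raises(ValueError):  # credentials AND a transport: rejected
    metastore_v1.DataprocMetastoreClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )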
- client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.DataprocMetastoreGrpcTransport,) def test_dataproc_metastore_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.DataprocMetastoreTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -4432,7 +4288,7 @@ def test_dataproc_metastore_base_transport(): ) as Transport: Transport.return_value = None transport = transports.DataprocMetastoreTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -4464,15 +4320,37 @@ def test_dataproc_metastore_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_dataproc_metastore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.metastore_v1alpha.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataprocMetastoreTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_dataproc_metastore_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.metastore_v1alpha.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataprocMetastoreTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -4485,19 +4363,33 @@ def test_dataproc_metastore_base_transport_with_credentials_file(): def test_dataproc_metastore_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
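# The ADC tests whose bodies follow patch google.auth.default so that no real
# credentials are needed; the transport simply picks up whatever the patch
# returns. The same technique works standalone, as in this hedged sketch
# (annotation, not patch content):
from unittest import mock

import google.auth
from google.auth import credentials as ga_credentials
from google.cloud.metastore_v1.services.dataproc_metastore import transports

with mock.patch.object(google.auth, "default", autospec=True) as adc:
    adc.return_value = (ga_credentials.AnonymousCredentials(), None)
    transports.DataprocMetastoreGrpcTransport()  # no credentials passed
    adc.assert_called_once()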
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.metastore_v1alpha.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DataprocMetastoreTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_dataproc_metastore_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataprocMetastoreClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_dataproc_metastore_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) DataprocMetastoreClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -4505,20 +4397,156 @@ def test_dataproc_metastore_auth_adc(): ) -def test_dataproc_metastore_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataprocMetastoreGrpcTransport, + transports.DataprocMetastoreGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_dataproc_metastore_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.DataprocMetastoreGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataprocMetastoreGrpcTransport, + transports.DataprocMetastoreGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_dataproc_metastore_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
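# Why the gte/lt twins above: google-auth 1.25.0 added a `default_scopes`
# argument to google.auth.default(), so newer clients pass the library
# default separately and any user-supplied `scopes` take precedence. A hedged
# sketch of the two call shapes these paired tests assert:
#
#     # google-auth >= 1.25.0
#     google.auth.default(
#         scopes=None,
#         default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
#         quota_project_id=None,
#     )
#     # google-auth < 1.25.0 (no default_scopes kwarg yet)
#     google.auth.default(
#         scopes=("https://www.googleapis.com/auth/cloud-platform",),
#         quota_project_id=None,
#     )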
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataprocMetastoreGrpcTransport, grpc_helpers), + (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_dataproc_metastore_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "metastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="metastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataprocMetastoreGrpcTransport, grpc_helpers), + (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_dataproc_metastore_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "metastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataprocMetastoreGrpcTransport, grpc_helpers), + (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_dataproc_metastore_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
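# Note the `autospec=True` added to these patches: the mock then enforces the
# real signature of google.auth.default / grpc_helpers.create_channel, so a
# test that passes a misspelled keyword fails loudly instead of passing
# vacuously. Illustrative sketch:
#
#     with mock.patch.object(google.auth, "default", autospec=True) as adc:
#         adc.return_value = (ga_credentials.AnonymousCredentials(), None)
#         google.auth.default(bogus_kwarg=1)  # TypeError under autospec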
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "metastore.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -4527,7 +4555,7 @@ def test_dataproc_metastore_transport_auth_adc(): ], ) def test_dataproc_metastore_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -4566,7 +4594,7 @@ def test_dataproc_metastore_grpc_transport_client_cert_source_for_mtls(transport def test_dataproc_metastore_host_no_port(): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="metastore.googleapis.com" ), @@ -4576,7 +4604,7 @@ def test_dataproc_metastore_host_no_port(): def test_dataproc_metastore_host_with_port(): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="metastore.googleapis.com:8000" ), @@ -4632,9 +4660,9 @@ def test_dataproc_metastore_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -4710,7 +4738,7 @@ def test_dataproc_metastore_transport_channel_mtls_with_adc(transport_class): def test_dataproc_metastore_grpc_lro_client(): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -4723,7 +4751,7 @@ def test_dataproc_metastore_grpc_lro_client(): def test_dataproc_metastore_grpc_lro_async_client(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -4739,7 +4767,6 @@ def test_backup_path(): location = "clam" service = "whelk" backup = "octopus" - expected = "projects/{project}/locations/{location}/services/{service}/backups/{backup}".format( project=project, location=location, service=service, backup=backup, ) @@ -4766,7 +4793,6 @@ def test_metadata_import_path(): location = "nautilus" service = "scallop" metadata_import = "abalone" - expected = "projects/{project}/locations/{location}/services/{service}/metadataImports/{metadata_import}".format( project=project, location=location, @@ -4796,7 +4822,6 @@ def 
test_parse_metadata_import_path(): def test_network_path(): project = "oyster" network = "nudibranch" - expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, ) @@ -4820,7 +4845,6 @@ def test_service_path(): project = "winkle" location = "nautilus" service = "scallop" - expected = "projects/{project}/locations/{location}/services/{service}".format( project=project, location=location, service=service, ) @@ -4843,7 +4867,6 @@ def test_parse_service_path(): def test_common_billing_account_path(): billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4864,7 +4887,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder,) actual = DataprocMetastoreClient.common_folder_path(folder) assert expected == actual @@ -4883,7 +4905,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization,) actual = DataprocMetastoreClient.common_organization_path(organization) assert expected == actual @@ -4902,7 +4923,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project,) actual = DataprocMetastoreClient.common_project_path(project) assert expected == actual @@ -4922,7 +4942,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "scallop" location = "abalone" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -4949,7 +4968,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DataprocMetastoreTransport, "_prep_wrapped_messages" ) as prep: client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -4958,6 +4977,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DataprocMetastoreClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/metastore_v1beta/__init__.py b/tests/unit/gapic/metastore_v1beta/__init__.py index 42ffdf2..4de6597 100644 --- a/tests/unit/gapic/metastore_v1beta/__init__.py +++ b/tests/unit/gapic/metastore_v1beta/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py b/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py index 7d77d1a..ec80a11 100644 --- a/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py +++ b/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.metastore_v1beta.services.dataproc_metastore import ( DataprocMetastoreAsyncClient, @@ -43,13 +42,43 @@ ) from google.cloud.metastore_v1beta.services.dataproc_metastore import pagers from google.cloud.metastore_v1beta.services.dataproc_metastore import transports +from google.cloud.metastore_v1beta.services.dataproc_metastore.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.metastore_v1beta.services.dataproc_metastore.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.metastore_v1beta.types import metastore from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore -from google.type import dayofweek_pb2 as dayofweek # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
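# The skipif markers below compare installed versions with PEP 440 semantics
# via packaging.version, not string comparison. Illustrative of the ordering
# relied on here:
#
#     from packaging import version
#     assert version.parse("1.25.0") > version.parse("1.9.10")   # not lexical
#     assert version.parse("1.26.0.dev1") < version.parse("1.26.0")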
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -101,7 +130,7 @@ def test__get_default_mtls_endpoint(): "client_class", [DataprocMetastoreClient, DataprocMetastoreAsyncClient,] ) def test_dataproc_metastore_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -118,7 +147,7 @@ def test_dataproc_metastore_client_from_service_account_info(client_class): "client_class", [DataprocMetastoreClient, DataprocMetastoreAsyncClient,] ) def test_dataproc_metastore_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -171,7 +200,7 @@ def test_dataproc_metastore_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(DataprocMetastoreClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -469,7 +498,7 @@ def test_list_services( transport: str = "grpc", request_type=metastore.ListServicesRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -482,21 +511,16 @@ def test_list_services( call.return_value = metastore.ListServicesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_services(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListServicesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListServicesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -508,7 +532,7 @@ def test_list_services_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
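# All of the per-RPC tests in this module share one recipe: patch the
# transport's callable for a single RPC and return a canned proto. A generic,
# illustrative sketch (any unary method follows the same shape):
#
#     client = DataprocMetastoreClient(
#         credentials=ga_credentials.AnonymousCredentials(),
#     )
#     with mock.patch.object(
#         type(client.transport.list_services), "__call__"
#     ) as call:
#         call.return_value = metastore.ListServicesResponse(
#             next_page_token="next_page_token_value",
#         )
#         response = client.list_services(request=metastore.ListServicesRequest())
#     assert response.next_page_token == "next_page_token_value"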
@@ -516,7 +540,6 @@ def test_list_services_empty_call(): client.list_services() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListServicesRequest() @@ -525,7 +548,7 @@ async def test_list_services_async( transport: str = "grpc_asyncio", request_type=metastore.ListServicesRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -541,20 +564,16 @@ async def test_list_services_async( unreachable=["unreachable_value"], ) ) - response = await client.list_services(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListServicesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListServicesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -564,17 +583,17 @@ async def test_list_services_async_from_dict(): def test_list_services_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListServicesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_services), "__call__") as call: call.return_value = metastore.ListServicesResponse() - client.list_services(request) # Establish that the underlying gRPC stub method was called. @@ -590,12 +609,13 @@ def test_list_services_field_headers(): @pytest.mark.asyncio async def test_list_services_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListServicesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -603,7 +623,6 @@ async def test_list_services_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metastore.ListServicesResponse() ) - await client.list_services(request) # Establish that the underlying gRPC stub method was called. @@ -617,13 +636,12 @@ async def test_list_services_field_headers_async(): def test_list_services_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_services), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metastore.ListServicesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_services(parent="parent_value",) @@ -632,12 +650,11 @@ def test_list_services_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_services_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -650,7 +667,7 @@ def test_list_services_flattened_error(): @pytest.mark.asyncio async def test_list_services_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -669,14 +686,13 @@ async def test_list_services_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_services_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -688,7 +704,7 @@ async def test_list_services_flattened_error_async(): def test_list_services_pager(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_services), "__call__") as call: @@ -726,7 +742,7 @@ def test_list_services_pager(): def test_list_services_pages(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_services), "__call__") as call: @@ -756,7 +772,9 @@ def test_list_services_pages(): @pytest.mark.asyncio async def test_list_services_async_pager(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -793,7 +811,9 @@ async def test_list_services_async_pager(): @pytest.mark.asyncio async def test_list_services_async_pages(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -827,7 +847,7 @@ async def test_list_services_async_pages(): def test_get_service(transport: str = "grpc", request_type=metastore.GetServiceRequest): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -852,37 +872,24 @@ def test_get_service(transport: str = "grpc", request_type=metastore.GetServiceR version="version_value" ), ) - response = client.get_service(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetServiceRequest() # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Service) - assert response.name == "name_value" - assert response.network == "network_value" - assert response.endpoint_uri == "endpoint_uri_value" - assert response.port == 453 - assert response.state == metastore.Service.State.CREATING - assert response.state_message == "state_message_value" - assert response.artifact_gcs_uri == "artifact_gcs_uri_value" - assert response.tier == metastore.Service.Tier.DEVELOPER - assert response.uid == "uid_value" - assert response.release_channel == metastore.Service.ReleaseChannel.CANARY @@ -894,7 +901,7 @@ def test_get_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -902,7 +909,6 @@ def test_get_service_empty_call(): client.get_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetServiceRequest() @@ -911,7 +917,7 @@ async def test_get_service_async( transport: str = "grpc_asyncio", request_type=metastore.GetServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -935,36 +941,24 @@ async def test_get_service_async( release_channel=metastore.Service.ReleaseChannel.CANARY, ) ) - response = await client.get_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetServiceRequest() # Establish that the response is the type that we expect. assert isinstance(response, metastore.Service) - assert response.name == "name_value" - assert response.network == "network_value" - assert response.endpoint_uri == "endpoint_uri_value" - assert response.port == 453 - assert response.state == metastore.Service.State.CREATING - assert response.state_message == "state_message_value" - assert response.artifact_gcs_uri == "artifact_gcs_uri_value" - assert response.tier == metastore.Service.Tier.DEVELOPER - assert response.uid == "uid_value" - assert response.release_channel == metastore.Service.ReleaseChannel.CANARY @@ -974,17 +968,17 @@ async def test_get_service_async_from_dict(): def test_get_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetServiceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_service), "__call__") as call: call.return_value = metastore.Service() - client.get_service(request) # Establish that the underlying gRPC stub method was called. 
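# The field-headers tests here verify routing metadata: when `request.name`
# is set, the client must attach an x-goog-request-params entry so the
# backend can route the call. Sketch of the assertion these tests make:
#
#     _, _, kw = call.mock_calls[0]
#     assert ("x-goog-request-params", "name=name/value") in kw["metadata"]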
@@ -1000,18 +994,18 @@ def test_get_service_field_headers(): @pytest.mark.asyncio async def test_get_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetServiceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_service), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Service()) - await client.get_service(request) # Establish that the underlying gRPC stub method was called. @@ -1025,13 +1019,12 @@ async def test_get_service_field_headers_async(): def test_get_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metastore.Service() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_service(name="name_value",) @@ -1040,12 +1033,11 @@ def test_get_service_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1058,7 +1050,7 @@ def test_get_service_flattened_error(): @pytest.mark.asyncio async def test_get_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1075,14 +1067,13 @@ async def test_get_service_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1097,7 +1088,7 @@ def test_create_service( transport: str = "grpc", request_type=metastore.CreateServiceRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1108,13 +1099,11 @@ def test_create_service( with mock.patch.object(type(client.transport.create_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_service(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateServiceRequest() # Establish that the response is the type that we expect. @@ -1129,7 +1118,7 @@ def test_create_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1137,7 +1126,6 @@ def test_create_service_empty_call(): client.create_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateServiceRequest() @@ -1146,7 +1134,7 @@ async def test_create_service_async( transport: str = "grpc_asyncio", request_type=metastore.CreateServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1159,13 +1147,11 @@ async def test_create_service_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateServiceRequest() # Establish that the response is the type that we expect. @@ -1178,17 +1164,17 @@ async def test_create_service_async_from_dict(): def test_create_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateServiceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_service), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_service(request) # Establish that the underlying gRPC stub method was called. @@ -1204,12 +1190,13 @@ def test_create_service_field_headers(): @pytest.mark.asyncio async def test_create_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateServiceRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1217,7 +1204,6 @@ async def test_create_service_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_service(request) # Establish that the underlying gRPC stub method was called. 
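# create/update/delete_service are long-running operations: the mocked stub
# hands back a raw operations_pb2.Operation and the client wraps it, so the
# tests assert a google.api_core future rather than a finished Service.
# Illustrative sketch:
#
#     with mock.patch.object(
#         type(client.transport.create_service), "__call__"
#     ) as call:
#         call.return_value = operations_pb2.Operation(name="operations/spam")
#         response = client.create_service(request=metastore.CreateServiceRequest())
#     assert isinstance(response, future.Future)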
@@ -1231,13 +1217,12 @@ async def test_create_service_field_headers_async(): def test_create_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_service( @@ -1254,18 +1239,15 @@ def test_create_service_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].service == metastore.Service( hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") ) - assert args[0].service_id == "service_id_value" def test_create_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1285,7 +1267,7 @@ def test_create_service_flattened_error(): @pytest.mark.asyncio async def test_create_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1312,20 +1294,17 @@ async def test_create_service_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].service == metastore.Service( hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") ) - assert args[0].service_id == "service_id_value" @pytest.mark.asyncio async def test_create_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1347,7 +1326,7 @@ def test_update_service( transport: str = "grpc", request_type=metastore.UpdateServiceRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1358,13 +1337,11 @@ def test_update_service( with mock.patch.object(type(client.transport.update_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateServiceRequest() # Establish that the response is the type that we expect. @@ -1379,7 +1356,7 @@ def test_update_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
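# The flattened-call tests above exercise keyword sugar: parent=, service=,
# and service_id= are copied onto a request object for you, and mixing them
# with an explicit request is rejected. Sketch of the error contract:
#
#     with pytest.raises(ValueError):
#         client.create_service(
#             metastore.CreateServiceRequest(), parent="parent_value",
#         )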
client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1387,7 +1364,6 @@ def test_update_service_empty_call(): client.update_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateServiceRequest() @@ -1396,7 +1372,7 @@ async def test_update_service_async( transport: str = "grpc_asyncio", request_type=metastore.UpdateServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1409,13 +1385,11 @@ async def test_update_service_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateServiceRequest() # Establish that the response is the type that we expect. @@ -1428,17 +1402,17 @@ async def test_update_service_async_from_dict(): def test_update_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.UpdateServiceRequest() + request.service.name = "service.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_service), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_service(request) # Establish that the underlying gRPC stub method was called. @@ -1456,12 +1430,13 @@ def test_update_service_field_headers(): @pytest.mark.asyncio async def test_update_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.UpdateServiceRequest() + request.service.name = "service.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1469,7 +1444,6 @@ async def test_update_service_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_service(request) # Establish that the underlying gRPC stub method was called. @@ -1485,13 +1459,12 @@ async def test_update_service_field_headers_async(): def test_update_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_service), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_service( @@ -1500,23 +1473,21 @@ def test_update_service_flattened(): version="version_value" ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].service == metastore.Service( hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1528,14 +1499,14 @@ def test_update_service_flattened_error(): version="version_value" ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1554,25 +1525,23 @@ async def test_update_service_flattened_async(): version="version_value" ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].service == metastore.Service( hive_metastore_config=metastore.HiveMetastoreConfig(version="version_value") ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1585,7 +1554,7 @@ async def test_update_service_flattened_error_async(): version="version_value" ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1593,7 +1562,7 @@ def test_delete_service( transport: str = "grpc", request_type=metastore.DeleteServiceRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1604,13 +1573,11 @@ def test_delete_service( with mock.patch.object(type(client.transport.delete_service), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteServiceRequest() # Establish that the response is the type that we expect. @@ -1625,7 +1592,7 @@ def test_delete_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1633,7 +1600,6 @@ def test_delete_service_empty_call(): client.delete_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteServiceRequest() @@ -1642,7 +1608,7 @@ async def test_delete_service_async( transport: str = "grpc_asyncio", request_type=metastore.DeleteServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1655,13 +1621,11 @@ async def test_delete_service_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteServiceRequest() # Establish that the response is the type that we expect. @@ -1674,17 +1638,17 @@ async def test_delete_service_async_from_dict(): def test_delete_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.DeleteServiceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_service), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_service(request) # Establish that the underlying gRPC stub method was called. @@ -1700,12 +1664,13 @@ def test_delete_service_field_headers(): @pytest.mark.asyncio async def test_delete_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.DeleteServiceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1713,7 +1678,6 @@ async def test_delete_service_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_service(request) # Establish that the underlying gRPC stub method was called. 
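# Async variants wrap the canned response in
# grpc_helpers_async.FakeUnaryUnaryCall so the patched stub is awaitable like
# a real aio call. Sketch, assuming it runs inside an @pytest.mark.asyncio
# test with a DataprocMetastoreAsyncClient:
#
#     with mock.patch.object(
#         type(client.transport.delete_service), "__call__"
#     ) as call:
#         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
#             operations_pb2.Operation(name="operations/spam")
#         )
#         response = await client.delete_service(
#             request=metastore.DeleteServiceRequest()
#         )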
@@ -1727,13 +1691,12 @@ async def test_delete_service_field_headers_async(): def test_delete_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_service(name="name_value",) @@ -1742,12 +1705,11 @@ def test_delete_service_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1760,7 +1722,7 @@ def test_delete_service_flattened_error(): @pytest.mark.asyncio async def test_delete_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1779,14 +1741,13 @@ async def test_delete_service_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1801,7 +1762,7 @@ def test_list_metadata_imports( transport: str = "grpc", request_type=metastore.ListMetadataImportsRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1816,21 +1777,16 @@ def test_list_metadata_imports( call.return_value = metastore.ListMetadataImportsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListMetadataImportsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMetadataImportsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -1842,7 +1798,7 @@ def test_list_metadata_imports_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
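# The empty-call tests below are a coverage failsafe: calling an RPC method
# with no request and no flattened fields must still send that method's
# default request type. Sketch of the check:
#
#     client.list_metadata_imports()
#     _, args, _ = call.mock_calls[0]
#     assert args[0] == metastore.ListMetadataImportsRequest()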
@@ -1852,7 +1808,6 @@ def test_list_metadata_imports_empty_call(): client.list_metadata_imports() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListMetadataImportsRequest() @@ -1861,7 +1816,7 @@ async def test_list_metadata_imports_async( transport: str = "grpc_asyncio", request_type=metastore.ListMetadataImportsRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1879,20 +1834,16 @@ async def test_list_metadata_imports_async( unreachable=["unreachable_value"], ) ) - response = await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListMetadataImportsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMetadataImportsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -1902,11 +1853,12 @@ async def test_list_metadata_imports_async_from_dict(): def test_list_metadata_imports_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListMetadataImportsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1914,7 +1866,6 @@ def test_list_metadata_imports_field_headers(): type(client.transport.list_metadata_imports), "__call__" ) as call: call.return_value = metastore.ListMetadataImportsResponse() - client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. @@ -1930,12 +1881,13 @@ def test_list_metadata_imports_field_headers(): @pytest.mark.asyncio async def test_list_metadata_imports_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListMetadataImportsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1945,7 +1897,6 @@ async def test_list_metadata_imports_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metastore.ListMetadataImportsResponse() ) - await client.list_metadata_imports(request) # Establish that the underlying gRPC stub method was called. @@ -1959,7 +1910,7 @@ async def test_list_metadata_imports_field_headers_async(): def test_list_metadata_imports_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1967,7 +1918,6 @@ def test_list_metadata_imports_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = metastore.ListMetadataImportsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_metadata_imports(parent="parent_value",) @@ -1976,12 +1926,11 @@ def test_list_metadata_imports_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_metadata_imports_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1994,7 +1943,7 @@ def test_list_metadata_imports_flattened_error(): @pytest.mark.asyncio async def test_list_metadata_imports_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2015,14 +1964,13 @@ async def test_list_metadata_imports_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_metadata_imports_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2034,7 +1982,7 @@ async def test_list_metadata_imports_flattened_error_async(): def test_list_metadata_imports_pager(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2079,7 +2027,7 @@ def test_list_metadata_imports_pager(): def test_list_metadata_imports_pages(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2116,7 +2064,9 @@ def test_list_metadata_imports_pages(): @pytest.mark.asyncio async def test_list_metadata_imports_async_pager(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2160,7 +2110,9 @@ async def test_list_metadata_imports_async_pager(): @pytest.mark.asyncio async def test_list_metadata_imports_async_pages(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2203,7 +2155,7 @@ def test_get_metadata_import( transport: str = "grpc", request_type=metastore.GetMetadataImportRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2223,23 +2175,17 @@ def test_get_metadata_import( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ), ) - response = client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetMetadataImportRequest() # Establish that the response is the type that we expect. - assert isinstance(response, metastore.MetadataImport) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == metastore.MetadataImport.State.RUNNING @@ -2251,7 +2197,7 @@ def test_get_metadata_import_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2261,7 +2207,6 @@ def test_get_metadata_import_empty_call(): client.get_metadata_import() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetMetadataImportRequest() @@ -2270,7 +2215,7 @@ async def test_get_metadata_import_async( transport: str = "grpc_asyncio", request_type=metastore.GetMetadataImportRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2289,22 +2234,17 @@ async def test_get_metadata_import_async( state=metastore.MetadataImport.State.RUNNING, ) ) - response = await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetMetadataImportRequest() # Establish that the response is the type that we expect. assert isinstance(response, metastore.MetadataImport) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == metastore.MetadataImport.State.RUNNING @@ -2314,11 +2254,12 @@ async def test_get_metadata_import_async_from_dict(): def test_get_metadata_import_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetMetadataImportRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2326,7 +2267,6 @@ def test_get_metadata_import_field_headers(): type(client.transport.get_metadata_import), "__call__" ) as call: call.return_value = metastore.MetadataImport() - client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. 
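# Illustrative sketch of what the flattened-call tests verify: keyword
# arguments accepted by the handwritten surface are copied onto the request
# proto that reaches the transport. A sketch, not the generated test itself.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.metastore_v1beta.services.dataproc_metastore import (
    DataprocMetastoreClient,
)
from google.cloud.metastore_v1beta.types import metastore


def sketch_flattened_kwargs():
    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(
        type(client.transport.get_metadata_import), "__call__"
    ) as call:
        call.return_value = metastore.MetadataImport()
        client.get_metadata_import(name="name_value")
        # The flattened `name` kwarg lands on the GetMetadataImportRequest.
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"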
@@ -2342,12 +2282,13 @@ def test_get_metadata_import_field_headers(): @pytest.mark.asyncio async def test_get_metadata_import_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetMetadataImportRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2357,7 +2298,6 @@ async def test_get_metadata_import_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metastore.MetadataImport() ) - await client.get_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2371,7 +2311,7 @@ async def test_get_metadata_import_field_headers_async(): def test_get_metadata_import_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2379,7 +2319,6 @@ def test_get_metadata_import_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = metastore.MetadataImport() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_metadata_import(name="name_value",) @@ -2388,12 +2327,11 @@ def test_get_metadata_import_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_metadata_import_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2406,7 +2344,7 @@ def test_get_metadata_import_flattened_error(): @pytest.mark.asyncio async def test_get_metadata_import_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2427,14 +2365,13 @@ async def test_get_metadata_import_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_metadata_import_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2449,7 +2386,7 @@ def test_create_metadata_import( transport: str = "grpc", request_type=metastore.CreateMetadataImportRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2462,13 +2399,11 @@ def test_create_metadata_import( ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateMetadataImportRequest() # Establish that the response is the type that we expect. @@ -2483,7 +2418,7 @@ def test_create_metadata_import_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2493,7 +2428,6 @@ def test_create_metadata_import_empty_call(): client.create_metadata_import() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateMetadataImportRequest() @@ -2502,7 +2436,7 @@ async def test_create_metadata_import_async( transport: str = "grpc_asyncio", request_type=metastore.CreateMetadataImportRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2517,13 +2451,11 @@ async def test_create_metadata_import_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateMetadataImportRequest() # Establish that the response is the type that we expect. @@ -2536,11 +2468,12 @@ async def test_create_metadata_import_async_from_dict(): def test_create_metadata_import_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateMetadataImportRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2548,7 +2481,6 @@ def test_create_metadata_import_field_headers(): type(client.transport.create_metadata_import), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2564,12 +2496,13 @@ def test_create_metadata_import_field_headers(): @pytest.mark.asyncio async def test_create_metadata_import_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateMetadataImportRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
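# Illustrative sketch of the long-running-operation tests above: the stub
# returns a raw operations_pb2.Operation and the client hands back an
# api_core future that would poll it. The future.Future check mirrors the
# generated GAPIC tests; a sketch, not the test body itself.
from unittest import mock

from google.api_core import future
from google.auth import credentials as ga_credentials
from google.longrunning import operations_pb2
from google.cloud.metastore_v1beta.services.dataproc_metastore import (
    DataprocMetastoreClient,
)
from google.cloud.metastore_v1beta.types import metastore


def sketch_lro_wrapping():
    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(
        type(client.transport.create_metadata_import), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.create_metadata_import(
            request=metastore.CreateMetadataImportRequest()
        )
        # The raw Operation is wrapped in a polling future, not returned as-is.
        assert isinstance(response, future.Future)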
@@ -2579,7 +2512,6 @@ async def test_create_metadata_import_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2593,7 +2525,7 @@ async def test_create_metadata_import_field_headers_async(): def test_create_metadata_import_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2601,7 +2533,6 @@ def test_create_metadata_import_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_metadata_import( @@ -2618,20 +2549,17 @@ def test_create_metadata_import_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metadata_import == metastore.MetadataImport( database_dump=metastore.MetadataImport.DatabaseDump( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ) - assert args[0].metadata_import_id == "metadata_import_id_value" def test_create_metadata_import_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2651,7 +2579,7 @@ def test_create_metadata_import_flattened_error(): @pytest.mark.asyncio async def test_create_metadata_import_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2680,22 +2608,19 @@ async def test_create_metadata_import_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metadata_import == metastore.MetadataImport( database_dump=metastore.MetadataImport.DatabaseDump( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ) - assert args[0].metadata_import_id == "metadata_import_id_value" @pytest.mark.asyncio async def test_create_metadata_import_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2717,7 +2642,7 @@ def test_update_metadata_import( transport: str = "grpc", request_type=metastore.UpdateMetadataImportRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2730,13 +2655,11 @@ def test_update_metadata_import( ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateMetadataImportRequest() # Establish that the response is the type that we expect. @@ -2751,7 +2674,7 @@ def test_update_metadata_import_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2761,7 +2684,6 @@ def test_update_metadata_import_empty_call(): client.update_metadata_import() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateMetadataImportRequest() @@ -2770,7 +2692,7 @@ async def test_update_metadata_import_async( transport: str = "grpc_asyncio", request_type=metastore.UpdateMetadataImportRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2785,13 +2707,11 @@ async def test_update_metadata_import_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateMetadataImportRequest() # Establish that the response is the type that we expect. @@ -2804,11 +2724,12 @@ async def test_update_metadata_import_async_from_dict(): def test_update_metadata_import_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.UpdateMetadataImportRequest() + request.metadata_import.name = "metadata_import.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2816,7 +2737,6 @@ def test_update_metadata_import_field_headers(): type(client.transport.update_metadata_import), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2835,12 +2755,13 @@ def test_update_metadata_import_field_headers(): @pytest.mark.asyncio async def test_update_metadata_import_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.UpdateMetadataImportRequest() + request.metadata_import.name = "metadata_import.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
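# Illustrative sketch of what the field-header tests above verify:
# populating a routing field on the request makes the client attach an
# x-goog-request-params metadata entry to the gRPC call. Names mirror the
# surrounding tests; the exact assertion shape is an assumption about the
# generated code, not a copy of it.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.longrunning import operations_pb2
from google.cloud.metastore_v1beta.services.dataproc_metastore import (
    DataprocMetastoreClient,
)
from google.cloud.metastore_v1beta.types import metastore


def sketch_routing_header():
    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials())
    request = metastore.UpdateMetadataImportRequest()
    request.metadata_import.name = "metadata_import.name/value"
    with mock.patch.object(
        type(client.transport.update_metadata_import), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.update_metadata_import(request)
        # The routing field is echoed into the request-params header.
        _, _, kw = call.mock_calls[0]
        assert (
            "x-goog-request-params",
            "metadata_import.name=metadata_import.name/value",
        ) in kw["metadata"]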
@@ -2850,7 +2771,6 @@ async def test_update_metadata_import_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_metadata_import(request) # Establish that the underlying gRPC stub method was called. @@ -2867,7 +2787,7 @@ async def test_update_metadata_import_field_headers_async(): def test_update_metadata_import_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2875,7 +2795,6 @@ def test_update_metadata_import_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_metadata_import( @@ -2884,25 +2803,23 @@ def test_update_metadata_import_flattened(): database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metadata_import == metastore.MetadataImport( database_dump=metastore.MetadataImport.DatabaseDump( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_metadata_import_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2914,14 +2831,14 @@ def test_update_metadata_import_flattened_error(): database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_metadata_import_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2942,27 +2859,25 @@ async def test_update_metadata_import_flattened_async(): database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metadata_import == metastore.MetadataImport( database_dump=metastore.MetadataImport.DatabaseDump( database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_metadata_import_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2975,7 +2890,7 @@ async def test_update_metadata_import_flattened_error_async(): database_type=metastore.MetadataImport.DatabaseDump.DatabaseType.MYSQL ) ), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2983,7 +2898,7 @@ def test_export_metadata( transport: str = "grpc", request_type=metastore.ExportMetadataRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2994,13 +2909,11 @@ def test_export_metadata( with mock.patch.object(type(client.transport.export_metadata), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.export_metadata(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ExportMetadataRequest() # Establish that the response is the type that we expect. @@ -3015,7 +2928,7 @@ def test_export_metadata_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3023,7 +2936,6 @@ def test_export_metadata_empty_call(): client.export_metadata() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ExportMetadataRequest() @@ -3032,7 +2944,7 @@ async def test_export_metadata_async( transport: str = "grpc_asyncio", request_type=metastore.ExportMetadataRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3045,13 +2957,11 @@ async def test_export_metadata_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ExportMetadataRequest() # Establish that the response is the type that we expect. 
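# The hunks above swap the old `field_mask` import alias for the canonical
# protobuf module. Minimal sketch of the new spelling used by the flattened
# update calls (the path value mirrors the tests):
from google.protobuf import field_mask_pb2

update_mask = field_mask_pb2.FieldMask(paths=["paths_value"])
# e.g. client.update_metadata_import(metadata_import=..., update_mask=update_mask)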
@@ -3064,17 +2974,17 @@ async def test_export_metadata_async_from_dict(): def test_export_metadata_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ExportMetadataRequest() + request.service = "service/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.export_metadata), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.export_metadata(request) # Establish that the underlying gRPC stub method was called. @@ -3090,12 +3000,13 @@ def test_export_metadata_field_headers(): @pytest.mark.asyncio async def test_export_metadata_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ExportMetadataRequest() + request.service = "service/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3103,7 +3014,6 @@ async def test_export_metadata_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.export_metadata(request) # Establish that the underlying gRPC stub method was called. @@ -3120,7 +3030,7 @@ def test_restore_service( transport: str = "grpc", request_type=metastore.RestoreServiceRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3131,13 +3041,11 @@ def test_restore_service( with mock.patch.object(type(client.transport.restore_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.restore_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.RestoreServiceRequest() # Establish that the response is the type that we expect. @@ -3152,7 +3060,7 @@ def test_restore_service_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
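# Illustrative sketch of the async-stub pattern above: the mocked call
# returns a FakeUnaryUnaryCall so the async client can `await` it.
# Names mirror the surrounding tests; a sketch, not the generated test body.
import asyncio
from unittest import mock

from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.longrunning import operations_pb2
from google.cloud.metastore_v1beta.services.dataproc_metastore import (
    DataprocMetastoreAsyncClient,
)
from google.cloud.metastore_v1beta.types import metastore


async def sketch_async_stub():
    client = DataprocMetastoreAsyncClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    with mock.patch.object(type(client.transport.export_metadata), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        # Awaiting the mocked RPC yields the wrapped long-running operation.
        await client.export_metadata(request=metastore.ExportMetadataRequest())


asyncio.run(sketch_async_stub())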
@@ -3160,7 +3068,6 @@ def test_restore_service_empty_call(): client.restore_service() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.RestoreServiceRequest() @@ -3169,7 +3076,7 @@ async def test_restore_service_async( transport: str = "grpc_asyncio", request_type=metastore.RestoreServiceRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3182,13 +3089,11 @@ async def test_restore_service_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.restore_service(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.RestoreServiceRequest() # Establish that the response is the type that we expect. @@ -3201,17 +3106,17 @@ async def test_restore_service_async_from_dict(): def test_restore_service_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.RestoreServiceRequest() + request.service = "service/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.restore_service), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_service(request) # Establish that the underlying gRPC stub method was called. @@ -3227,12 +3132,13 @@ def test_restore_service_field_headers(): @pytest.mark.asyncio async def test_restore_service_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.RestoreServiceRequest() + request.service = "service/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3240,7 +3146,6 @@ async def test_restore_service_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.restore_service(request) # Establish that the underlying gRPC stub method was called. @@ -3254,13 +3159,12 @@ async def test_restore_service_field_headers_async(): def test_restore_service_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.restore_service), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.restore_service( @@ -3271,14 +3175,12 @@ def test_restore_service_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].service == "service_value" - assert args[0].backup == "backup_value" def test_restore_service_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3293,7 +3195,7 @@ def test_restore_service_flattened_error(): @pytest.mark.asyncio async def test_restore_service_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3314,16 +3216,14 @@ async def test_restore_service_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].service == "service_value" - assert args[0].backup == "backup_value" @pytest.mark.asyncio async def test_restore_service_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3340,7 +3240,7 @@ def test_list_backups( transport: str = "grpc", request_type=metastore.ListBackupsRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3353,21 +3253,16 @@ def test_list_backups( call.return_value = metastore.ListBackupsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_backups(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListBackupsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -3379,7 +3274,7 @@ def test_list_backups_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3387,7 +3282,6 @@ def test_list_backups_empty_call(): client.list_backups() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListBackupsRequest() @@ -3396,7 +3290,7 @@ async def test_list_backups_async( transport: str = "grpc_asyncio", request_type=metastore.ListBackupsRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3412,20 +3306,16 @@ async def test_list_backups_async( unreachable=["unreachable_value"], ) ) - response = await client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListBackupsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBackupsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @@ -3435,17 +3325,17 @@ async def test_list_backups_async_from_dict(): def test_list_backups_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListBackupsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: call.return_value = metastore.ListBackupsResponse() - client.list_backups(request) # Establish that the underlying gRPC stub method was called. @@ -3461,12 +3351,13 @@ def test_list_backups_field_headers(): @pytest.mark.asyncio async def test_list_backups_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.ListBackupsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3474,7 +3365,6 @@ async def test_list_backups_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metastore.ListBackupsResponse() ) - await client.list_backups(request) # Establish that the underlying gRPC stub method was called. @@ -3488,13 +3378,12 @@ async def test_list_backups_field_headers_async(): def test_list_backups_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metastore.ListBackupsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_backups(parent="parent_value",) @@ -3503,12 +3392,11 @@ def test_list_backups_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_backups_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3521,7 +3409,7 @@ def test_list_backups_flattened_error(): @pytest.mark.asyncio async def test_list_backups_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3540,14 +3428,13 @@ async def test_list_backups_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_backups_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3559,7 +3446,7 @@ async def test_list_backups_flattened_error_async(): def test_list_backups_pager(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: @@ -3593,7 +3480,7 @@ def test_list_backups_pager(): def test_list_backups_pages(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_backups), "__call__") as call: @@ -3619,7 +3506,9 @@ def test_list_backups_pages(): @pytest.mark.asyncio async def test_list_backups_async_pager(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3652,7 +3541,9 @@ async def test_list_backups_async_pager(): @pytest.mark.asyncio async def test_list_backups_async_pages(): - client = DataprocMetastoreAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DataprocMetastoreAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3682,7 +3573,7 @@ async def test_list_backups_async_pages(): def test_get_backup(transport: str = "grpc", request_type=metastore.GetBackupRequest): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3697,23 +3588,17 @@ def test_get_backup(transport: str = "grpc", request_type=metastore.GetBackupReq state=metastore.Backup.State.CREATING, description="description_value", ) - response = client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetBackupRequest() # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Backup) - assert response.name == "name_value" - assert response.state == metastore.Backup.State.CREATING - assert response.description == "description_value" @@ -3725,7 +3610,7 @@ def test_get_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
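# Illustrative sketch of the async pager tests above. Assumes Python 3.8+
# for unittest.mock.AsyncMock (the generated tests use the `mock` package);
# a sketch of the pattern, not the generated test body.
import asyncio
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.metastore_v1beta.services.dataproc_metastore import (
    DataprocMetastoreAsyncClient,
)
from google.cloud.metastore_v1beta.types import metastore


async def sketch_async_pager():
    client = DataprocMetastoreAsyncClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    with mock.patch.object(
        type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock
    ) as call:
        call.side_effect = (
            metastore.ListBackupsResponse(
                backups=[metastore.Backup(), metastore.Backup()],
                next_page_token="abc",
            ),
            metastore.ListBackupsResponse(backups=[], next_page_token=""),
        )
        async_pager = await client.list_backups(parent="parent_value")
        # `async for` drains both mocked pages.
        backups = [backup async for backup in async_pager]
        assert len(backups) == 2


asyncio.run(sketch_async_pager())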
@@ -3733,7 +3618,6 @@ def test_get_backup_empty_call(): client.get_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetBackupRequest() @@ -3742,7 +3626,7 @@ async def test_get_backup_async( transport: str = "grpc_asyncio", request_type=metastore.GetBackupRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3759,22 +3643,17 @@ async def test_get_backup_async( description="description_value", ) ) - response = await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetBackupRequest() # Establish that the response is the type that we expect. assert isinstance(response, metastore.Backup) - assert response.name == "name_value" - assert response.state == metastore.Backup.State.CREATING - assert response.description == "description_value" @@ -3784,17 +3663,17 @@ async def test_get_backup_async_from_dict(): def test_get_backup_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetBackupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_backup), "__call__") as call: call.return_value = metastore.Backup() - client.get_backup(request) # Establish that the underlying gRPC stub method was called. @@ -3810,18 +3689,18 @@ def test_get_backup_field_headers(): @pytest.mark.asyncio async def test_get_backup_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.GetBackupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Backup()) - await client.get_backup(request) # Establish that the underlying gRPC stub method was called. @@ -3835,13 +3714,12 @@ async def test_get_backup_field_headers_async(): def test_get_backup_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metastore.Backup() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_backup(name="name_value",) @@ -3850,12 +3728,11 @@ def test_get_backup_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_backup_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3868,7 +3745,7 @@ def test_get_backup_flattened_error(): @pytest.mark.asyncio async def test_get_backup_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3885,14 +3762,13 @@ async def test_get_backup_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_backup_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3907,7 +3783,7 @@ def test_create_backup( transport: str = "grpc", request_type=metastore.CreateBackupRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3918,13 +3794,11 @@ def test_create_backup( with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateBackupRequest() # Establish that the response is the type that we expect. @@ -3939,7 +3813,7 @@ def test_create_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3947,7 +3821,6 @@ def test_create_backup_empty_call(): client.create_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateBackupRequest() @@ -3956,7 +3829,7 @@ async def test_create_backup_async( transport: str = "grpc_asyncio", request_type=metastore.CreateBackupRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3969,13 +3842,11 @@ async def test_create_backup_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_backup(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateBackupRequest() # Establish that the response is the type that we expect. @@ -3988,17 +3859,17 @@ async def test_create_backup_async_from_dict(): def test_create_backup_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateBackupRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup(request) # Establish that the underlying gRPC stub method was called. @@ -4014,12 +3885,13 @@ def test_create_backup_field_headers(): @pytest.mark.asyncio async def test_create_backup_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.CreateBackupRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4027,7 +3899,6 @@ async def test_create_backup_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_backup(request) # Establish that the underlying gRPC stub method was called. @@ -4041,13 +3912,12 @@ async def test_create_backup_field_headers_async(): def test_create_backup_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_backup( @@ -4060,16 +3930,13 @@ def test_create_backup_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].backup == metastore.Backup(name="name_value") - assert args[0].backup_id == "backup_id_value" def test_create_backup_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4085,7 +3952,7 @@ def test_create_backup_flattened_error(): @pytest.mark.asyncio async def test_create_backup_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4108,18 +3975,15 @@ async def test_create_backup_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].backup == metastore.Backup(name="name_value") - assert args[0].backup_id == "backup_id_value" @pytest.mark.asyncio async def test_create_backup_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4137,7 +4001,7 @@ def test_delete_backup( transport: str = "grpc", request_type=metastore.DeleteBackupRequest ): client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4148,13 +4012,11 @@ def test_delete_backup( with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteBackupRequest() # Establish that the response is the type that we expect. @@ -4169,7 +4031,7 @@ def test_delete_backup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataprocMetastoreClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4177,7 +4039,6 @@ def test_delete_backup_empty_call(): client.delete_backup() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteBackupRequest() @@ -4186,7 +4047,7 @@ async def test_delete_backup_async( transport: str = "grpc_asyncio", request_type=metastore.DeleteBackupRequest ): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4199,13 +4060,11 @@ async def test_delete_backup_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteBackupRequest() # Establish that the response is the type that we expect. @@ -4218,17 +4077,17 @@ async def test_delete_backup_async_from_dict(): def test_delete_backup_field_headers(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.DeleteBackupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup(request) # Establish that the underlying gRPC stub method was called. @@ -4244,12 +4103,13 @@ def test_delete_backup_field_headers(): @pytest.mark.asyncio async def test_delete_backup_field_headers_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = metastore.DeleteBackupRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4257,7 +4117,6 @@ async def test_delete_backup_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. @@ -4271,13 +4130,12 @@ async def test_delete_backup_field_headers_async(): def test_delete_backup_flattened(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_backup(name="name_value",) @@ -4286,12 +4144,11 @@ def test_delete_backup_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_backup_flattened_error(): - client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),) + client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4304,7 +4161,7 @@ def test_delete_backup_flattened_error(): @pytest.mark.asyncio async def test_delete_backup_flattened_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4323,14 +4180,13 @@ async def test_delete_backup_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_backup_flattened_error_async(): client = DataprocMetastoreAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4344,16 +4200,16 @@ async def test_delete_backup_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
     transport = transports.DataprocMetastoreGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = DataprocMetastoreClient(
-            credentials=credentials.AnonymousCredentials(), transport=transport,
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
         )

     # It is an error to provide a credentials file and a transport instance.
     transport = transports.DataprocMetastoreGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = DataprocMetastoreClient(
@@ -4363,7 +4219,7 @@ def test_credentials_transport_error():

     # It is an error to provide scopes and a transport instance.
     transport = transports.DataprocMetastoreGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = DataprocMetastoreClient(
@@ -4374,7 +4230,7 @@ def test_credentials_transport_error():

 def test_transport_instance():
     # A client may be instantiated with a custom transport instance.
     transport = transports.DataprocMetastoreGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     client = DataprocMetastoreClient(transport=transport)
     assert client.transport is transport
@@ -4383,13 +4239,13 @@ def test_transport_instance():

 def test_transport_get_channel():
     # A client may be instantiated with a custom transport instance.
     transport = transports.DataprocMetastoreGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     channel = transport.grpc_channel
     assert channel

     transport = transports.DataprocMetastoreGrpcAsyncIOTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     channel = transport.grpc_channel
     assert channel
@@ -4404,23 +4260,23 @@ def test_transport_get_channel():
     ],
 )
 def test_transport_adc(transport_class):
     # Test default credentials are used if not provided.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport_class()
         adc.assert_called_once()


 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
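     # (No transport argument is passed here, so the synchronous gRPC
     # transport is the expected default.)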
-    client = DataprocMetastoreClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataprocMetastoreClient(credentials=ga_credentials.AnonymousCredentials(),)
     assert isinstance(client.transport, transports.DataprocMetastoreGrpcTransport,)


 def test_dataproc_metastore_base_transport_error():
     # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(exceptions.DuplicateCredentialArgs):
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
         transport = transports.DataprocMetastoreTransport(
-            credentials=credentials.AnonymousCredentials(),
+            credentials=ga_credentials.AnonymousCredentials(),
             credentials_file="credentials.json",
         )

@@ -4432,7 +4288,7 @@ def test_dataproc_metastore_base_transport():
     ) as Transport:
         Transport.return_value = None
         transport = transports.DataprocMetastoreTransport(
-            credentials=credentials.AnonymousCredentials(),
+            credentials=ga_credentials.AnonymousCredentials(),
         )

     # Every method on the transport should just blindly
@@ -4464,15 +4320,37 @@ def test_dataproc_metastore_base_transport():
         transport.operations_client


+@requires_google_auth_gte_1_25_0
 def test_dataproc_metastore_base_transport_with_credentials_file():
     # Instantiate the base transport with a credentials file
     with mock.patch.object(
-        auth, "load_credentials_from_file"
+        google.auth, "load_credentials_from_file", autospec=True
     ) as load_creds, mock.patch(
         "google.cloud.metastore_v1beta.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages"
     ) as Transport:
         Transport.return_value = None
-        load_creds.return_value = (credentials.AnonymousCredentials(), None)
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DataprocMetastoreTransport(
+            credentials_file="credentials.json", quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_dataproc_metastore_base_transport_with_credentials_file_old_google_auth():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.metastore_v1beta.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.DataprocMetastoreTransport(
             credentials_file="credentials.json", quota_project_id="octopus",
         )
@@ -4485,19 +4363,33 @@ def test_dataproc_metastore_base_transport_with_credentials_file():

 def test_dataproc_metastore_base_transport_with_adc():
     # Test the default credentials are used if credentials and credentials_file are None.
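     # (google.auth.default is patched below, so no real ADC lookup is made.)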
-    with mock.patch.object(auth, "default") as adc, mock.patch(
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
         "google.cloud.metastore_v1beta.services.dataproc_metastore.transports.DataprocMetastoreTransport._prep_wrapped_messages"
     ) as Transport:
         Transport.return_value = None
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.DataprocMetastoreTransport()
         adc.assert_called_once()


+@requires_google_auth_gte_1_25_0
 def test_dataproc_metastore_auth_adc():
     # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        DataprocMetastoreClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_dataproc_metastore_auth_adc_old_google_auth():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         DataprocMetastoreClient()
         adc.assert_called_once_with(
             scopes=("https://www.googleapis.com/auth/cloud-platform",),
@@ -4505,20 +4397,156 @@ def test_dataproc_metastore_auth_adc():
         )


-def test_dataproc_metastore_transport_auth_adc():
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataprocMetastoreGrpcTransport,
+        transports.DataprocMetastoreGrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_gte_1_25_0
+def test_dataproc_metastore_transport_auth_adc(transport_class):
     # If credentials and host are not provided, the transport class should use
     # ADC credentials.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
-        transports.DataprocMetastoreGrpcTransport(
-            host="squid.clam.whelk", quota_project_id="octopus"
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
         )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataprocMetastoreGrpcTransport,
+        transports.DataprocMetastoreGrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_lt_1_25_0
+def test_dataproc_metastore_transport_auth_adc_old_google_auth(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
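+    # (Older google-auth releases have no default_scopes parameter, so the
+    # service's cloud-platform scope is expected to be passed via scopes.)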
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus")
         adc.assert_called_once_with(
             scopes=("https://www.googleapis.com/auth/cloud-platform",),
             quota_project_id="octopus",
         )


+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataprocMetastoreGrpcTransport, grpc_helpers),
+        (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_gte_1_26_0
+def test_dataproc_metastore_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "metastore.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="metastore.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataprocMetastoreGrpcTransport, grpc_helpers),
+        (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_lt_1_26_0
+def test_dataproc_metastore_transport_create_channel_old_api_core(
+    transport_class, grpc_helpers
+):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus")
+
+        create_channel.assert_called_with(
+            "metastore.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataprocMetastoreGrpcTransport, grpc_helpers),
+        (transports.DataprocMetastoreGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_lt_1_26_0
+def test_dataproc_metastore_transport_create_channel_user_scopes(
+    transport_class, grpc_helpers
+):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
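+    # (User-supplied scopes should be forwarded to create_channel as-is,
+    # overriding the service's default cloud-platform scope.)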
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "metastore.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            scopes=["1", "2"],
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
 @pytest.mark.parametrize(
     "transport_class",
     [
@@ -4527,7 +4555,7 @@ def test_dataproc_metastore_transport_auth_adc():
     ],
 )
 def test_dataproc_metastore_grpc_transport_client_cert_source_for_mtls(transport_class):
-    cred = credentials.AnonymousCredentials()
+    cred = ga_credentials.AnonymousCredentials()

     # Check ssl_channel_credentials is used if provided.
     with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
@@ -4566,7 +4594,7 @@ def test_dataproc_metastore_grpc_transport_client_cert_source_for_mtls(transport

 def test_dataproc_metastore_host_no_port():
     client = DataprocMetastoreClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
         client_options=client_options.ClientOptions(
             api_endpoint="metastore.googleapis.com"
         ),
@@ -4576,7 +4604,7 @@ def test_dataproc_metastore_host_no_port():

 def test_dataproc_metastore_host_with_port():
     client = DataprocMetastoreClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
         client_options=client_options.ClientOptions(
             api_endpoint="metastore.googleapis.com:8000"
         ),
@@ -4632,9 +4660,9 @@ def test_dataproc_metastore_transport_channel_mtls_with_client_cert_source(
         mock_grpc_channel = mock.Mock()
         grpc_create_channel.return_value = mock_grpc_channel

-        cred = credentials.AnonymousCredentials()
+        cred = ga_credentials.AnonymousCredentials()
         with pytest.warns(DeprecationWarning):
-            with mock.patch.object(auth, "default") as adc:
+            with mock.patch.object(google.auth, "default") as adc:
                 adc.return_value = (cred, None)
                 transport = transport_class(
                     host="squid.clam.whelk",
@@ -4710,7 +4738,7 @@ def test_dataproc_metastore_transport_channel_mtls_with_adc(transport_class):

 def test_dataproc_metastore_grpc_lro_client():
     client = DataprocMetastoreClient(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )
     transport = client.transport
@@ -4723,7 +4751,7 @@ def test_dataproc_metastore_grpc_lro_client():

 def test_dataproc_metastore_grpc_lro_async_client():
     client = DataprocMetastoreAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
     )
     transport = client.transport
@@ -4739,7 +4767,6 @@ def test_backup_path():
     location = "clam"
     service = "whelk"
     backup = "octopus"
-
     expected = "projects/{project}/locations/{location}/services/{service}/backups/{backup}".format(
         project=project, location=location, service=service, backup=backup,
     )
@@ -4766,7 +4793,6 @@ def test_metadata_import_path():
     location = "nautilus"
     service = "scallop"
     metadata_import = "abalone"
-
     expected = "projects/{project}/locations/{location}/services/{service}/metadataImports/{metadata_import}".format(
         project=project,
         location=location,
@@ -4796,7 +4822,6 @@ def test_parse_metadata_import_path():

 def test_network_path():
     project = "oyster"
     network = "nudibranch"
-
     expected = "projects/{project}/global/networks/{network}".format(
         project=project, network=network,
     )
@@ -4820,7 +4845,6 @@ def test_service_path():
     project = "winkle"
     location = "nautilus"
     service = "scallop"
-
     expected = "projects/{project}/locations/{location}/services/{service}".format(
         project=project, location=location, service=service,
     )
@@ -4843,7 +4867,6 @@ def test_parse_service_path():

 def test_common_billing_account_path():
     billing_account = "whelk"
-
     expected = "billingAccounts/{billing_account}".format(
         billing_account=billing_account,
     )
@@ -4864,7 +4887,6 @@ def test_parse_common_billing_account_path():

 def test_common_folder_path():
     folder = "oyster"
-
     expected = "folders/{folder}".format(folder=folder,)
     actual = DataprocMetastoreClient.common_folder_path(folder)
     assert expected == actual
@@ -4883,7 +4905,6 @@ def test_parse_common_folder_path():

 def test_common_organization_path():
     organization = "cuttlefish"
-
     expected = "organizations/{organization}".format(organization=organization,)
     actual = DataprocMetastoreClient.common_organization_path(organization)
     assert expected == actual
@@ -4902,7 +4923,6 @@ def test_parse_common_organization_path():

 def test_common_project_path():
     project = "winkle"
-
     expected = "projects/{project}".format(project=project,)
     actual = DataprocMetastoreClient.common_project_path(project)
     assert expected == actual
@@ -4922,7 +4942,6 @@ def test_parse_common_project_path():

 def test_common_location_path():
     project = "scallop"
     location = "abalone"
-
     expected = "projects/{project}/locations/{location}".format(
         project=project, location=location,
     )
@@ -4949,7 +4968,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
         transports.DataprocMetastoreTransport, "_prep_wrapped_messages"
     ) as prep:
         client = DataprocMetastoreClient(
-            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)

@@ -4958,6 +4977,6 @@
     ) as prep:
         transport_class = DataprocMetastoreClient.get_transport_class()
         transport = transport_class(
-            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
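
Note on the version-gating markers used above: requires_google_auth_gte_1_25_0,
requires_google_auth_lt_1_25_0, requires_api_core_gte_1_26_0, and
requires_api_core_lt_1_26_0 are pytest skip markers defined earlier in this test
module (outside this excerpt). A minimal sketch of the pattern, assuming the
packaging library is available (the exact in-repo definitions may differ):

    # Hypothetical sketch; the real markers live earlier in the module.
    import google.auth
    import packaging.version
    import pytest

    _GOOGLE_AUTH_VERSION = packaging.version.parse(
        getattr(google.auth, "__version__", "0.0.0")
    )

    # Run a test only when google-auth is new enough to accept default_scopes.
    requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
        _GOOGLE_AUTH_VERSION < packaging.version.parse("1.25.0"),
        reason="This test requires google-auth >= 1.25.0",
    )

    # Run the legacy variant only on older google-auth releases.
    requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
        _GOOGLE_AUTH_VERSION >= packaging.version.parse("1.25.0"),
        reason="This test requires google-auth < 1.25.0",
    )

The api_core markers follow the same pattern against
google.api_core.version.__version__, gating on whether
grpc_helpers.create_channel accepts default_scopes and default_host.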