From cdea9744d0bc7244a42894acc1446080a16b2dab Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 15 May 2021 02:18:02 +0000 Subject: [PATCH] chore: upgrade gapic-generator-python to 0.46.3 (#150) PiperOrigin-RevId: 373649163 Source-Link: https://github.com/googleapis/googleapis/commit/7e1b14e6c7a9ab96d2db7e4a131981f162446d34 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0a3c7d272d697796db75857bac73905c68e498c3 fix: add async client chore: add autogenerated snippets chore: remove auth, policy, and options from the reserved names list feat: support self-signed JWT flow for service accounts chore: enable GAPIC metadata generation --- docs/dlp_v2/dlp_service.rst | 1 - google/cloud/dlp/__init__.py | 96 +- google/cloud/dlp_v2/__init__.py | 29 +- google/cloud/dlp_v2/gapic_metadata.json | 363 ++++ google/cloud/dlp_v2/services/__init__.py | 1 - .../dlp_v2/services/dlp_service/__init__.py | 2 - .../services/dlp_service/async_client.py | 164 +- .../dlp_v2/services/dlp_service/client.py | 192 +-- .../dlp_v2/services/dlp_service/pagers.py | 12 +- .../dlp_service/transports/__init__.py | 2 - .../services/dlp_service/transports/base.py | 331 ++-- .../services/dlp_service/transports/grpc.py | 52 +- .../dlp_service/transports/grpc_asyncio.py | 53 +- google/cloud/dlp_v2/types/__init__.py | 2 - google/cloud/dlp_v2/types/dlp.py | 941 +++-------- google/cloud/dlp_v2/types/storage.py | 152 +- scripts/fixup_dlp_v2_keywords.py | 73 +- tests/__init__.py | 15 + tests/unit/__init__.py | 15 + tests/unit/gapic/__init__.py | 15 + tests/unit/gapic/dlp_v2/__init__.py | 1 - tests/unit/gapic/dlp_v2/test_dlp_service.py | 1479 +++++++---------- 22 files changed, 1793 insertions(+), 2198 deletions(-) create mode 100644 google/cloud/dlp_v2/gapic_metadata.json create mode 100644 tests/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/gapic/__init__.py diff --git a/docs/dlp_v2/dlp_service.rst b/docs/dlp_v2/dlp_service.rst index 86272ecb..914da512 100644 --- a/docs/dlp_v2/dlp_service.rst +++ b/docs/dlp_v2/dlp_service.rst @@ -5,7 +5,6 @@ DlpService :members: :inherited-members: - .. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers :members: :inherited-members: diff --git a/google/cloud/dlp/__init__.py b/google/cloud/dlp/__init__.py index cd1bf675..fc2c38e4 100644 --- a/google/cloud/dlp/__init__.py +++ b/google/cloud/dlp/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,8 +14,9 @@ # limitations under the License. 
# -from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient + from google.cloud.dlp_v2.types.dlp import Action from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails @@ -30,7 +30,6 @@ from google.cloud.dlp_v2.types.dlp import Container from google.cloud.dlp_v2.types.dlp import ContentItem from google.cloud.dlp_v2.types.dlp import ContentLocation -from google.cloud.dlp_v2.types.dlp import ContentOption from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest @@ -52,7 +51,6 @@ from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest from google.cloud.dlp_v2.types.dlp import DlpJob -from google.cloud.dlp_v2.types.dlp import DlpJobType from google.cloud.dlp_v2.types.dlp import DocumentLocation from google.cloud.dlp_v2.types.dlp import Error from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes @@ -75,17 +73,16 @@ from google.cloud.dlp_v2.types.dlp import ImageLocation from google.cloud.dlp_v2.types.dlp import InfoTypeDescription from google.cloud.dlp_v2.types.dlp import InfoTypeStats -from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations from google.cloud.dlp_v2.types.dlp import InspectConfig from google.cloud.dlp_v2.types.dlp import InspectContentRequest from google.cloud.dlp_v2.types.dlp import InspectContentResponse from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails +from google.cloud.dlp_v2.types.dlp import InspectionRule +from google.cloud.dlp_v2.types.dlp import InspectionRuleSet from google.cloud.dlp_v2.types.dlp import InspectJobConfig from google.cloud.dlp_v2.types.dlp import InspectResult from google.cloud.dlp_v2.types.dlp import InspectTemplate -from google.cloud.dlp_v2.types.dlp import InspectionRule -from google.cloud.dlp_v2.types.dlp import InspectionRuleSet from google.cloud.dlp_v2.types.dlp import JobTrigger from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig @@ -104,9 +101,7 @@ from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse from google.cloud.dlp_v2.types.dlp import Location from google.cloud.dlp_v2.types.dlp import Manual -from google.cloud.dlp_v2.types.dlp import MatchingType from google.cloud.dlp_v2.types.dlp import MetadataLocation -from google.cloud.dlp_v2.types.dlp import MetadataType from google.cloud.dlp_v2.types.dlp import OutputStorageConfig from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation from google.cloud.dlp_v2.types.dlp import PrivacyMetric @@ -122,7 +117,6 @@ from google.cloud.dlp_v2.types.dlp import RedactImageResponse from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import RelationalOperator from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig @@ -131,7 +125,6 @@ from 
google.cloud.dlp_v2.types.dlp import StorageMetadataLabel from google.cloud.dlp_v2.types.dlp import StoredInfoType from google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion from google.cloud.dlp_v2.types.dlp import Table @@ -148,6 +141,13 @@ from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest from google.cloud.dlp_v2.types.dlp import Value from google.cloud.dlp_v2.types.dlp import ValueFrequency +from google.cloud.dlp_v2.types.dlp import ContentOption +from google.cloud.dlp_v2.types.dlp import DlpJobType +from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy +from google.cloud.dlp_v2.types.dlp import MatchingType +from google.cloud.dlp_v2.types.dlp import MetadataType +from google.cloud.dlp_v2.types.dlp import RelationalOperator +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState from google.cloud.dlp_v2.types.storage import BigQueryField from google.cloud.dlp_v2.types.storage import BigQueryKey from google.cloud.dlp_v2.types.storage import BigQueryOptions @@ -161,41 +161,34 @@ from google.cloud.dlp_v2.types.storage import DatastoreOptions from google.cloud.dlp_v2.types.storage import EntityId from google.cloud.dlp_v2.types.storage import FieldId -from google.cloud.dlp_v2.types.storage import FileType from google.cloud.dlp_v2.types.storage import HybridOptions from google.cloud.dlp_v2.types.storage import InfoType from google.cloud.dlp_v2.types.storage import Key from google.cloud.dlp_v2.types.storage import KindExpression -from google.cloud.dlp_v2.types.storage import Likelihood from google.cloud.dlp_v2.types.storage import PartitionId from google.cloud.dlp_v2.types.storage import RecordKey from google.cloud.dlp_v2.types.storage import StorageConfig from google.cloud.dlp_v2.types.storage import StoredType from google.cloud.dlp_v2.types.storage import TableOptions +from google.cloud.dlp_v2.types.storage import FileType +from google.cloud.dlp_v2.types.storage import Likelihood __all__ = ( + "DlpServiceClient", + "DlpServiceAsyncClient", "Action", "ActivateJobTriggerRequest", "AnalyzeDataSourceRiskDetails", - "BigQueryField", - "BigQueryKey", - "BigQueryOptions", - "BigQueryTable", "BoundingBox", "BucketingConfig", "ByteContentItem", "CancelDlpJobRequest", "CharacterMaskConfig", "CharsToIgnore", - "CloudStorageFileSet", - "CloudStorageOptions", - "CloudStoragePath", - "CloudStorageRegexFileSet", "Color", "Container", "ContentItem", "ContentLocation", - "ContentOption", "CreateDeidentifyTemplateRequest", "CreateDlpJobRequest", "CreateInspectTemplateRequest", @@ -205,9 +198,6 @@ "CryptoHashConfig", "CryptoKey", "CryptoReplaceFfxFpeConfig", - "CustomInfoType", - "DatastoreKey", - "DatastoreOptions", "DateShiftConfig", "DateTime", "DeidentifyConfig", @@ -220,17 +210,11 @@ "DeleteJobTriggerRequest", "DeleteStoredInfoTypeRequest", "DlpJob", - "DlpJobType", - "DlpServiceAsyncClient", - "DlpServiceClient", "DocumentLocation", - "EntityId", "Error", "ExcludeInfoTypes", "ExclusionRule", - "FieldId", "FieldTransformation", - "FileType", "Finding", "FinishDlpJobRequest", "FixedSizeBucketingConfig", @@ -245,29 +229,23 @@ "HybridInspectJobTriggerRequest", "HybridInspectResponse", "HybridInspectStatistics", - "HybridOptions", "ImageLocation", - "InfoType", "InfoTypeDescription", "InfoTypeStats", - "InfoTypeSupportedBy", "InfoTypeTransformations", "InspectConfig", 
"InspectContentRequest", "InspectContentResponse", "InspectDataSourceDetails", + "InspectionRule", + "InspectionRuleSet", "InspectJobConfig", "InspectResult", "InspectTemplate", - "InspectionRule", - "InspectionRuleSet", "JobTrigger", - "Key", - "KindExpression", "KmsWrappedCryptoKey", "LargeCustomDictionaryConfig", "LargeCustomDictionaryStats", - "Likelihood", "ListDeidentifyTemplatesRequest", "ListDeidentifyTemplatesResponse", "ListDlpJobsRequest", @@ -282,18 +260,14 @@ "ListStoredInfoTypesResponse", "Location", "Manual", - "MatchingType", "MetadataLocation", - "MetadataType", "OutputStorageConfig", - "PartitionId", "PrimitiveTransformation", "PrivacyMetric", "QuasiId", "QuoteInfo", "Range", "RecordCondition", - "RecordKey", "RecordLocation", "RecordSuppression", "RecordTransformations", @@ -302,23 +276,18 @@ "RedactImageResponse", "ReidentifyContentRequest", "ReidentifyContentResponse", - "RelationalOperator", "ReplaceValueConfig", "ReplaceWithInfoTypeConfig", "RiskAnalysisJobConfig", "Schedule", "StatisticalTable", - "StorageConfig", "StorageMetadataLabel", "StoredInfoType", "StoredInfoTypeConfig", - "StoredInfoTypeState", "StoredInfoTypeStats", "StoredInfoTypeVersion", - "StoredType", "Table", "TableLocation", - "TableOptions", "TimePartConfig", "TransformationErrorHandling", "TransformationOverview", @@ -331,4 +300,35 @@ "UpdateStoredInfoTypeRequest", "Value", "ValueFrequency", + "ContentOption", + "DlpJobType", + "InfoTypeSupportedBy", + "MatchingType", + "MetadataType", + "RelationalOperator", + "StoredInfoTypeState", + "BigQueryField", + "BigQueryKey", + "BigQueryOptions", + "BigQueryTable", + "CloudStorageFileSet", + "CloudStorageOptions", + "CloudStoragePath", + "CloudStorageRegexFileSet", + "CustomInfoType", + "DatastoreKey", + "DatastoreOptions", + "EntityId", + "FieldId", + "HybridOptions", + "InfoType", + "Key", + "KindExpression", + "PartitionId", + "RecordKey", + "StorageConfig", + "StoredType", + "TableOptions", + "FileType", + "Likelihood", ) diff --git a/google/cloud/dlp_v2/__init__.py b/google/cloud/dlp_v2/__init__.py index 73bacd1a..043c5c92 100644 --- a/google/cloud/dlp_v2/__init__.py +++ b/google/cloud/dlp_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,6 +15,8 @@ # from .services.dlp_service import DlpServiceClient +from .services.dlp_service import DlpServiceAsyncClient + from .types.dlp import Action from .types.dlp import ActivateJobTriggerRequest from .types.dlp import AnalyzeDataSourceRiskDetails @@ -29,7 +30,6 @@ from .types.dlp import Container from .types.dlp import ContentItem from .types.dlp import ContentLocation -from .types.dlp import ContentOption from .types.dlp import CreateDeidentifyTemplateRequest from .types.dlp import CreateDlpJobRequest from .types.dlp import CreateInspectTemplateRequest @@ -51,7 +51,6 @@ from .types.dlp import DeleteJobTriggerRequest from .types.dlp import DeleteStoredInfoTypeRequest from .types.dlp import DlpJob -from .types.dlp import DlpJobType from .types.dlp import DocumentLocation from .types.dlp import Error from .types.dlp import ExcludeInfoTypes @@ -74,17 +73,16 @@ from .types.dlp import ImageLocation from .types.dlp import InfoTypeDescription from .types.dlp import InfoTypeStats -from .types.dlp import InfoTypeSupportedBy from .types.dlp import InfoTypeTransformations from .types.dlp import InspectConfig from .types.dlp import InspectContentRequest from .types.dlp import InspectContentResponse from 
.types.dlp import InspectDataSourceDetails +from .types.dlp import InspectionRule +from .types.dlp import InspectionRuleSet from .types.dlp import InspectJobConfig from .types.dlp import InspectResult from .types.dlp import InspectTemplate -from .types.dlp import InspectionRule -from .types.dlp import InspectionRuleSet from .types.dlp import JobTrigger from .types.dlp import KmsWrappedCryptoKey from .types.dlp import LargeCustomDictionaryConfig @@ -103,9 +101,7 @@ from .types.dlp import ListStoredInfoTypesResponse from .types.dlp import Location from .types.dlp import Manual -from .types.dlp import MatchingType from .types.dlp import MetadataLocation -from .types.dlp import MetadataType from .types.dlp import OutputStorageConfig from .types.dlp import PrimitiveTransformation from .types.dlp import PrivacyMetric @@ -121,7 +117,6 @@ from .types.dlp import RedactImageResponse from .types.dlp import ReidentifyContentRequest from .types.dlp import ReidentifyContentResponse -from .types.dlp import RelationalOperator from .types.dlp import ReplaceValueConfig from .types.dlp import ReplaceWithInfoTypeConfig from .types.dlp import RiskAnalysisJobConfig @@ -130,7 +125,6 @@ from .types.dlp import StorageMetadataLabel from .types.dlp import StoredInfoType from .types.dlp import StoredInfoTypeConfig -from .types.dlp import StoredInfoTypeState from .types.dlp import StoredInfoTypeStats from .types.dlp import StoredInfoTypeVersion from .types.dlp import Table @@ -147,6 +141,13 @@ from .types.dlp import UpdateStoredInfoTypeRequest from .types.dlp import Value from .types.dlp import ValueFrequency +from .types.dlp import ContentOption +from .types.dlp import DlpJobType +from .types.dlp import InfoTypeSupportedBy +from .types.dlp import MatchingType +from .types.dlp import MetadataType +from .types.dlp import RelationalOperator +from .types.dlp import StoredInfoTypeState from .types.storage import BigQueryField from .types.storage import BigQueryKey from .types.storage import BigQueryOptions @@ -160,20 +161,20 @@ from .types.storage import DatastoreOptions from .types.storage import EntityId from .types.storage import FieldId -from .types.storage import FileType from .types.storage import HybridOptions from .types.storage import InfoType from .types.storage import Key from .types.storage import KindExpression -from .types.storage import Likelihood from .types.storage import PartitionId from .types.storage import RecordKey from .types.storage import StorageConfig from .types.storage import StoredType from .types.storage import TableOptions - +from .types.storage import FileType +from .types.storage import Likelihood __all__ = ( + "DlpServiceAsyncClient", "Action", "ActivateJobTriggerRequest", "AnalyzeDataSourceRiskDetails", @@ -221,6 +222,7 @@ "DeleteStoredInfoTypeRequest", "DlpJob", "DlpJobType", + "DlpServiceClient", "DocumentLocation", "EntityId", "Error", @@ -329,5 +331,4 @@ "UpdateStoredInfoTypeRequest", "Value", "ValueFrequency", - "DlpServiceClient", ) diff --git a/google/cloud/dlp_v2/gapic_metadata.json b/google/cloud/dlp_v2/gapic_metadata.json new file mode 100644 index 00000000..df73928b --- /dev/null +++ b/google/cloud/dlp_v2/gapic_metadata.json @@ -0,0 +1,363 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dlp_v2", + "protoPackage": "google.privacy.dlp.v2", + "schema": "1.0", + "services": { + "DlpService": { + "clients": { + "grpc": { + "libraryClient": "DlpServiceClient", + "rpcs": { 
+ "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DlpServiceAsyncClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" 
+ ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/dlp_v2/services/__init__.py b/google/cloud/dlp_v2/services/__init__.py index 42ffdf2b..4de65971 100644 --- a/google/cloud/dlp_v2/services/__init__.py +++ b/google/cloud/dlp_v2/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/dlp_v2/services/dlp_service/__init__.py b/google/cloud/dlp_v2/services/dlp_service/__init__.py index c55616fa..48aaaa91 100644 --- a/google/cloud/dlp_v2/services/dlp_service/__init__.py +++ b/google/cloud/dlp_v2/services/dlp_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import DlpServiceClient from .async_client import DlpServiceAsyncClient diff --git a/google/cloud/dlp_v2/services/dlp_service/async_client.py b/google/cloud/dlp_v2/services/dlp_service/async_client.py index 30ffb82f..e5ae1cee 100644 --- a/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ b/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,17 +20,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.dlp_v2.services.dlp_service import pagers from google.cloud.dlp_v2.types import dlp -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport from .client import DlpServiceClient @@ -75,25 +72,20 @@ class DlpServiceAsyncClient: parse_stored_info_type_path = staticmethod( DlpServiceClient.parse_stored_info_type_path ) - common_billing_account_path = staticmethod( DlpServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( DlpServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(DlpServiceClient.common_folder_path) parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DlpServiceClient.common_organization_path) parse_common_organization_path = staticmethod( DlpServiceClient.parse_common_organization_path ) - common_project_path = staticmethod(DlpServiceClient.common_project_path) parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) - common_location_path = staticmethod(DlpServiceClient.common_location_path) parse_common_location_path = staticmethod( DlpServiceClient.parse_common_location_path @@ -101,7 +93,8 @@ class DlpServiceAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -116,7 +109,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -133,7 +126,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DlpServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: DlpServiceTransport: The transport used by the client instance. @@ -147,12 +140,12 @@ def transport(self) -> DlpServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, DlpServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the dlp service client. 
+ """Instantiates the dlp service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -184,7 +177,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = DlpServiceClient( credentials=credentials, transport=transport, @@ -215,7 +207,6 @@ async def inspect_content( request (:class:`google.cloud.dlp_v2.types.InspectContentRequest`): The request object. Request to search for potentially sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -227,7 +218,6 @@ async def inspect_content( Results of inspecting an item. """ # Create or coerce a protobuf request object. - request = dlp.InspectContentRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -239,7 +229,8 @@ async def inspect_content( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -283,7 +274,6 @@ async def redact_image( The request object. Request to search for potentially sensitive info in an image and redact it by covering it with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -295,7 +285,6 @@ async def redact_image( Results of redacting an image. """ # Create or coerce a protobuf request object. - request = dlp.RedactImageRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -307,7 +296,8 @@ async def redact_image( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -350,7 +340,6 @@ async def deidentify_content( request (:class:`google.cloud.dlp_v2.types.DeidentifyContentRequest`): The request object. Request to de-identify a list of items. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -364,7 +353,6 @@ async def deidentify_content( """ # Create or coerce a protobuf request object. - request = dlp.DeidentifyContentRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -376,7 +364,8 @@ async def deidentify_content( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -411,7 +400,6 @@ async def reidentify_content( Args: request (:class:`google.cloud.dlp_v2.types.ReidentifyContentRequest`): The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -423,7 +411,6 @@ async def reidentify_content( Results of re-identifying a item. """ # Create or coerce a protobuf request object. 
- request = dlp.ReidentifyContentRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -435,7 +422,8 @@ async def reidentify_content( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -484,7 +472,6 @@ async def list_info_types( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -511,7 +498,6 @@ async def list_info_types( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -524,7 +510,8 @@ async def list_info_types( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -593,7 +580,6 @@ async def create_inspect_template( This corresponds to the ``inspect_template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -625,7 +611,6 @@ async def create_inspect_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if inspect_template is not None: @@ -657,7 +642,7 @@ async def update_inspect_template( *, name: str = None, inspect_template: dlp.InspectTemplate = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -691,7 +676,6 @@ async def update_inspect_template( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -723,7 +707,6 @@ async def update_inspect_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if inspect_template is not None: @@ -777,7 +760,6 @@ async def get_inspect_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -809,7 +791,6 @@ async def get_inspect_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -822,7 +803,8 @@ async def get_inspect_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -888,7 +870,6 @@ async def list_inspect_templates( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -918,7 +899,6 @@ async def list_inspect_templates( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -931,7 +911,8 @@ async def list_inspect_templates( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -983,7 +964,6 @@ async def delete_inspect_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1004,7 +984,6 @@ async def delete_inspect_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1017,7 +996,8 @@ async def delete_inspect_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -1092,7 +1072,6 @@ async def create_deidentify_template( This corresponds to the ``deidentify_template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1122,7 +1101,6 @@ async def create_deidentify_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if deidentify_template is not None: @@ -1154,7 +1132,7 @@ async def update_deidentify_template( *, name: str = None, deidentify_template: dlp.DeidentifyTemplate = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1188,7 +1166,6 @@ async def update_deidentify_template( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1218,7 +1195,6 @@ async def update_deidentify_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if deidentify_template is not None: @@ -1272,7 +1248,6 @@ async def get_deidentify_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1302,7 +1277,6 @@ async def get_deidentify_template( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -1315,7 +1289,8 @@ async def get_deidentify_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -1381,7 +1356,6 @@ async def list_deidentify_templates( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1411,7 +1385,6 @@ async def list_deidentify_templates( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1424,7 +1397,8 @@ async def list_deidentify_templates( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -1476,7 +1450,6 @@ async def delete_deidentify_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1497,7 +1470,6 @@ async def delete_deidentify_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1510,7 +1482,8 @@ async def delete_deidentify_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -1578,7 +1551,6 @@ async def create_job_trigger( This corresponds to the ``job_trigger`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1607,7 +1579,6 @@ async def create_job_trigger( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if job_trigger is not None: @@ -1639,7 +1610,7 @@ async def update_job_trigger( *, name: str = None, job_trigger: dlp.JobTrigger = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1672,7 +1643,6 @@ async def update_job_trigger( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1701,7 +1671,6 @@ async def update_job_trigger( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if job_trigger is not None: @@ -1759,7 +1728,6 @@ async def hybrid_inspect_job_trigger( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1786,7 +1754,6 @@ async def hybrid_inspect_job_trigger( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1834,7 +1801,6 @@ async def get_job_trigger( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1863,7 +1829,6 @@ async def get_job_trigger( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1876,7 +1841,8 @@ async def get_job_trigger( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -1936,7 +1902,6 @@ async def list_job_triggers( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1965,7 +1930,6 @@ async def list_job_triggers( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1978,7 +1942,8 @@ async def list_job_triggers( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -2029,7 +1994,6 @@ async def delete_job_trigger( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2050,7 +2014,6 @@ async def delete_job_trigger( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2063,7 +2026,8 @@ async def delete_job_trigger( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -2098,7 +2062,6 @@ async def activate_job_trigger( request (:class:`google.cloud.dlp_v2.types.ActivateJobTriggerRequest`): The request object. Request message for ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2112,7 +2075,6 @@ async def activate_job_trigger( """ # Create or coerce a protobuf request object. - request = dlp.ActivateJobTriggerRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -2200,7 +2162,6 @@ async def create_dlp_job( This corresponds to the ``risk_job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2227,7 +2188,6 @@ async def create_dlp_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if inspect_job is not None: @@ -2298,7 +2258,6 @@ async def list_dlp_jobs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2328,7 +2287,6 @@ async def list_dlp_jobs( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -2341,7 +2299,8 @@ async def list_dlp_jobs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -2392,7 +2351,6 @@ async def get_dlp_job( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2419,7 +2377,6 @@ async def get_dlp_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2432,7 +2389,8 @@ async def get_dlp_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -2479,7 +2437,6 @@ async def delete_dlp_job( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2500,7 +2457,6 @@ async def delete_dlp_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2513,7 +2469,8 @@ async def delete_dlp_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -2551,7 +2508,6 @@ async def cancel_dlp_job( request (:class:`google.cloud.dlp_v2.types.CancelDlpJobRequest`): The request object. The request message for canceling a DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2559,7 +2515,6 @@ async def cancel_dlp_job( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = dlp.CancelDlpJobRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -2636,7 +2591,6 @@ async def create_stored_info_type( This corresponds to the ``config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2664,7 +2618,6 @@ async def create_stored_info_type( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent if config is not None: @@ -2696,7 +2649,7 @@ async def update_stored_info_type( *, name: str = None, config: dlp.StoredInfoTypeConfig = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -2736,7 +2689,6 @@ async def update_stored_info_type( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2764,7 +2716,6 @@ async def update_stored_info_type( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if config is not None: @@ -2818,7 +2769,6 @@ async def get_stored_info_type( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2846,7 +2796,6 @@ async def get_stored_info_type( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2859,7 +2808,8 @@ async def get_stored_info_type( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -2925,7 +2875,6 @@ async def list_stored_info_types( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2955,7 +2904,6 @@ async def list_stored_info_types( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -2968,7 +2916,8 @@ async def list_stored_info_types( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -3020,7 +2969,6 @@ async def delete_stored_info_type( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3041,7 +2989,6 @@ async def delete_stored_info_type( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -3054,7 +3001,8 @@ async def delete_stored_info_type( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -3102,7 +3050,6 @@ async def hybrid_inspect_dlp_job( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -3129,7 +3076,6 @@ async def hybrid_inspect_dlp_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -3172,7 +3118,6 @@ async def finish_dlp_job( request (:class:`google.cloud.dlp_v2.types.FinishDlpJobRequest`): The request object. The request message for finishing a DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3180,7 +3125,6 @@ async def finish_dlp_job( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = dlp.FinishDlpJobRequest(request) # Wrap the RPC method; this adds retry and timeout information, diff --git a/google/cloud/dlp_v2/services/dlp_service/client.py b/google/cloud/dlp_v2/services/dlp_service/client.py index 12fa3463..3d412897 100644 --- a/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/google/cloud/dlp_v2/services/dlp_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,9 +32,8 @@ from google.cloud.dlp_v2.services.dlp_service import pagers from google.cloud.dlp_v2.types import dlp -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DlpServiceGrpcTransport from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport @@ -55,7 +52,7 @@ class DlpServiceClientMeta(type): _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[DlpServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -87,7 +84,8 @@ class DlpServiceClient(metaclass=DlpServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
Args: @@ -121,7 +119,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -138,7 +137,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -157,23 +156,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DlpServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - DlpServiceTransport: The transport used by the client instance. + DlpServiceTransport: The transport used by the client + instance. """ return self._transport @staticmethod def deidentify_template_path(organization: str, deidentify_template: str,) -> str: - """Return a fully-qualified deidentify_template string.""" + """Returns a fully-qualified deidentify_template string.""" return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format( organization=organization, deidentify_template=deidentify_template, ) @staticmethod def parse_deidentify_template_path(path: str) -> Dict[str, str]: - """Parse a deidentify_template path into its component segments.""" + """Parses a deidentify_template path into its component segments.""" m = re.match( r"^organizations/(?P<organization>.+?)/deidentifyTemplates/(?P<deidentify_template>.+?)$", path, @@ -182,38 +182,38 @@ def parse_deidentify_template_path(path: str) -> Dict[str, str]: @staticmethod def dlp_content_path(project: str,) -> str: - """Return a fully-qualified dlp_content string.""" + """Returns a fully-qualified dlp_content string.""" return "projects/{project}/dlpContent".format(project=project,) @staticmethod def parse_dlp_content_path(path: str) -> Dict[str, str]: - """Parse a dlp_content path into its component segments.""" + """Parses a dlp_content path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/dlpContent$", path) return m.groupdict() if m else {} @staticmethod def dlp_job_path(project: str, dlp_job: str,) -> str: - """Return a fully-qualified dlp_job string.""" + """Returns a fully-qualified dlp_job string.""" return "projects/{project}/dlpJobs/{dlp_job}".format( project=project, dlp_job=dlp_job, ) @staticmethod def parse_dlp_job_path(path: str) -> Dict[str, str]: - """Parse a dlp_job path into its component segments.""" + """Parses a dlp_job path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/dlpJobs/(?P<dlp_job>.+?)$", path) return m.groupdict() if m else {} @staticmethod def finding_path(project: str, location: str, finding: str,) -> str: - """Return a fully-qualified finding string.""" + """Returns a fully-qualified finding string.""" return "projects/{project}/locations/{location}/findings/{finding}".format( project=project, location=location, finding=finding, ) @staticmethod def parse_finding_path(path: str) -> Dict[str, str]: - """Parse a finding path into its component segments.""" + """Parses a finding path into its component segments.""" m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/findings/(?P<finding>.+?)$", path, @@ -222,14 +222,14 @@ def parse_finding_path(path: str) ->
Dict[str, str]: @staticmethod def inspect_template_path(organization: str, inspect_template: str,) -> str: - """Return a fully-qualified inspect_template string.""" + """Returns a fully-qualified inspect_template string.""" return "organizations/{organization}/inspectTemplates/{inspect_template}".format( organization=organization, inspect_template=inspect_template, ) @staticmethod def parse_inspect_template_path(path: str) -> Dict[str, str]: - """Parse a inspect_template path into its component segments.""" + """Parses a inspect_template path into its component segments.""" m = re.match( r"^organizations/(?P<organization>.+?)/inspectTemplates/(?P<inspect_template>.+?)$", path, @@ -238,14 +238,14 @@ def parse_inspect_template_path(path: str) -> Dict[str, str]: @staticmethod def job_trigger_path(project: str, job_trigger: str,) -> str: - """Return a fully-qualified job_trigger string.""" + """Returns a fully-qualified job_trigger string.""" return "projects/{project}/jobTriggers/{job_trigger}".format( project=project, job_trigger=job_trigger, ) @staticmethod def parse_job_trigger_path(path: str) -> Dict[str, str]: - """Parse a job_trigger path into its component segments.""" + """Parses a job_trigger path into its component segments.""" m = re.match( r"^projects/(?P<project>.+?)/jobTriggers/(?P<job_trigger>.+?)$", path ) @@ -253,14 +253,14 @@ def parse_job_trigger_path(path: str) -> Dict[str, str]: @staticmethod def stored_info_type_path(organization: str, stored_info_type: str,) -> str: - """Return a fully-qualified stored_info_type string.""" + """Returns a fully-qualified stored_info_type string.""" return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format( organization=organization, stored_info_type=stored_info_type, ) @staticmethod def parse_stored_info_type_path(path: str) -> Dict[str, str]: - """Parse a stored_info_type path into its component segments.""" + """Parses a stored_info_type path into its component segments.""" m = re.match( r"^organizations/(?P<organization>.+?)/storedInfoTypes/(?P<stored_info_type>.+?)$", path, @@ -269,7 +269,7 @@ def parse_stored_info_type_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -282,7 +282,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -293,7 +293,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -304,7 +304,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -315,7 +315,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a
fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -329,12 +329,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DlpServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the dlp service client. + """Instantiates the dlp service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -389,9 +389,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -403,12 +404,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -423,8 +426,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -462,7 +465,6 @@ def inspect_content( request (google.cloud.dlp_v2.types.InspectContentRequest): The request object. Request to search for potentially sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -474,7 +476,6 @@ def inspect_content( Results of inspecting an item. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a dlp.InspectContentRequest. # There's no risk of modifying the input as we've already verified @@ -522,7 +523,6 @@ def redact_image( The request object. Request to search for potentially sensitive info in an image and redact it by covering it with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -534,7 +534,6 @@ def redact_image( Results of redacting an image. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a dlp.RedactImageRequest. # There's no risk of modifying the input as we've already verified @@ -581,7 +580,6 @@ def deidentify_content( request (google.cloud.dlp_v2.types.DeidentifyContentRequest): The request object. Request to de-identify a list of items. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
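The api_endpoint choice above is driven by the GOOGLE_API_USE_MTLS_ENDPOINT environment variable, which accepts "never", "always", or "auto" (the default). A simplified, self-contained sketch of that decision tree; it raises ValueError rather than MutualTLSChannelError and ignores an explicit client_options.api_endpoint:

    import os

    DEFAULT_ENDPOINT = "dlp.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = "dlp.mtls.googleapis.com"

    def pick_api_endpoint(has_client_cert: bool) -> str:
        # Mirror of the never/always/auto branch in __init__.
        use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_mtls_env == "never":
            return DEFAULT_ENDPOINT
        if use_mtls_env == "always":
            return DEFAULT_MTLS_ENDPOINT
        if use_mtls_env == "auto":
            return DEFAULT_MTLS_ENDPOINT if has_client_cert else DEFAULT_ENDPOINT
        raise ValueError(
            "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
        )

    print(pick_api_endpoint(has_client_cert=False))  # dlp.googleapis.com unless overridden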
@@ -595,7 +593,6 @@ def deidentify_content( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a dlp.DeidentifyContentRequest. # There's no risk of modifying the input as we've already verified @@ -634,7 +631,6 @@ def reidentify_content( Args: request (google.cloud.dlp_v2.types.ReidentifyContentRequest): The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -646,7 +642,6 @@ def reidentify_content( Results of re-identifying a item. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a dlp.ReidentifyContentRequest. # There's no risk of modifying the input as we've already verified @@ -699,7 +694,6 @@ def list_info_types( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -728,10 +722,8 @@ def list_info_types( # there are no flattened fields. if not isinstance(request, dlp.ListInfoTypesRequest): request = dlp.ListInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -800,7 +792,6 @@ def create_inspect_template( This corresponds to the ``inspect_template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -834,10 +825,8 @@ def create_inspect_template( # there are no flattened fields. if not isinstance(request, dlp.CreateInspectTemplateRequest): request = dlp.CreateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if inspect_template is not None: @@ -865,7 +854,7 @@ def update_inspect_template( *, name: str = None, inspect_template: dlp.InspectTemplate = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -899,7 +888,6 @@ def update_inspect_template( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -933,10 +921,8 @@ def update_inspect_template( # there are no flattened fields. if not isinstance(request, dlp.UpdateInspectTemplateRequest): request = dlp.UpdateInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if inspect_template is not None: @@ -986,7 +972,6 @@ def get_inspect_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
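Each RPC accepts either a prebuilt request object or the flattened fields, but not both, and the update methods take a field_mask_pb2.FieldMask naming the fields to change. A usage sketch, assuming the dlp_v2 package re-exports the client and request types at the top level; the resource names are placeholders and the calls need real credentials and an existing template:

    from google.cloud import dlp_v2
    from google.protobuf import field_mask_pb2

    client = dlp_v2.DlpServiceClient()

    # Option 1: pass a request object.
    template = client.get_inspect_template(
        request=dlp_v2.GetInspectTemplateRequest(
            name="organizations/example-org/inspectTemplates/example-template"
        )
    )

    # Option 2: pass the flattened field instead (not both at once).
    template = client.get_inspect_template(
        name="organizations/example-org/inspectTemplates/example-template"
    )

    # Updates take the new message plus a FieldMask listing the fields to change.
    client.update_inspect_template(
        name="organizations/example-org/inspectTemplates/example-template",
        inspect_template=dlp_v2.InspectTemplate(display_name="Updated name"),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )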
@@ -1020,10 +1005,8 @@ def get_inspect_template( # there are no flattened fields. if not isinstance(request, dlp.GetInspectTemplateRequest): request = dlp.GetInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1089,7 +1072,6 @@ def list_inspect_templates( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1121,10 +1103,8 @@ def list_inspect_templates( # there are no flattened fields. if not isinstance(request, dlp.ListInspectTemplatesRequest): request = dlp.ListInspectTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1176,7 +1156,6 @@ def delete_inspect_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1199,10 +1178,8 @@ def delete_inspect_template( # there are no flattened fields. if not isinstance(request, dlp.DeleteInspectTemplateRequest): request = dlp.DeleteInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1277,7 +1254,6 @@ def create_deidentify_template( This corresponds to the ``deidentify_template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1309,10 +1285,8 @@ def create_deidentify_template( # there are no flattened fields. if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): request = dlp.CreateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if deidentify_template is not None: @@ -1342,7 +1316,7 @@ def update_deidentify_template( *, name: str = None, deidentify_template: dlp.DeidentifyTemplate = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1376,7 +1350,6 @@ def update_deidentify_template( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1408,10 +1381,8 @@ def update_deidentify_template( # there are no flattened fields. if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): request = dlp.UpdateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if deidentify_template is not None: @@ -1463,7 +1434,6 @@ def get_deidentify_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1495,10 +1465,8 @@ def get_deidentify_template( # there are no flattened fields. if not isinstance(request, dlp.GetDeidentifyTemplateRequest): request = dlp.GetDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1564,7 +1532,6 @@ def list_deidentify_templates( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1596,10 +1563,8 @@ def list_deidentify_templates( # there are no flattened fields. if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): request = dlp.ListDeidentifyTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1653,7 +1618,6 @@ def delete_deidentify_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1676,10 +1640,8 @@ def delete_deidentify_template( # there are no flattened fields. if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): request = dlp.DeleteDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1749,7 +1711,6 @@ def create_job_trigger( This corresponds to the ``job_trigger`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1780,10 +1741,8 @@ def create_job_trigger( # there are no flattened fields. if not isinstance(request, dlp.CreateJobTriggerRequest): request = dlp.CreateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if job_trigger is not None: @@ -1811,7 +1770,7 @@ def update_job_trigger( *, name: str = None, job_trigger: dlp.JobTrigger = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1844,7 +1803,6 @@ def update_job_trigger( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1875,10 +1833,8 @@ def update_job_trigger( # there are no flattened fields. if not isinstance(request, dlp.UpdateJobTriggerRequest): request = dlp.UpdateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if job_trigger is not None: @@ -1932,7 +1888,6 @@ def hybrid_inspect_job_trigger( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1961,10 +1916,8 @@ def hybrid_inspect_job_trigger( # there are no flattened fields. if not isinstance(request, dlp.HybridInspectJobTriggerRequest): request = dlp.HybridInspectJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2010,7 +1963,6 @@ def get_job_trigger( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2041,10 +1993,8 @@ def get_job_trigger( # there are no flattened fields. if not isinstance(request, dlp.GetJobTriggerRequest): request = dlp.GetJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2104,7 +2054,6 @@ def list_job_triggers( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2135,10 +2084,8 @@ def list_job_triggers( # there are no flattened fields. if not isinstance(request, dlp.ListJobTriggersRequest): request = dlp.ListJobTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -2189,7 +2136,6 @@ def delete_job_trigger( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2212,10 +2158,8 @@ def delete_job_trigger( # there are no flattened fields. if not isinstance(request, dlp.DeleteJobTriggerRequest): request = dlp.DeleteJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2250,7 +2194,6 @@ def activate_job_trigger( request (google.cloud.dlp_v2.types.ActivateJobTriggerRequest): The request object. Request message for ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2264,7 +2207,6 @@ def activate_job_trigger( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a dlp.ActivateJobTriggerRequest. # There's no risk of modifying the input as we've already verified @@ -2353,7 +2295,6 @@ def create_dlp_job( This corresponds to the ``risk_job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2382,10 +2323,8 @@ def create_dlp_job( # there are no flattened fields. 
if not isinstance(request, dlp.CreateDlpJobRequest): request = dlp.CreateDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if inspect_job is not None: @@ -2452,7 +2391,6 @@ def list_dlp_jobs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2484,10 +2422,8 @@ def list_dlp_jobs( # there are no flattened fields. if not isinstance(request, dlp.ListDlpJobsRequest): request = dlp.ListDlpJobsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -2538,7 +2474,6 @@ def get_dlp_job( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2567,10 +2502,8 @@ def get_dlp_job( # there are no flattened fields. if not isinstance(request, dlp.GetDlpJobRequest): request = dlp.GetDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2617,7 +2550,6 @@ def delete_dlp_job( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2640,10 +2572,8 @@ def delete_dlp_job( # there are no flattened fields. if not isinstance(request, dlp.DeleteDlpJobRequest): request = dlp.DeleteDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2681,7 +2611,6 @@ def cancel_dlp_job( request (google.cloud.dlp_v2.types.CancelDlpJobRequest): The request object. The request message for canceling a DLP job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2689,7 +2618,6 @@ def cancel_dlp_job( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a dlp.CancelDlpJobRequest. # There's no risk of modifying the input as we've already verified @@ -2767,7 +2695,6 @@ def create_stored_info_type( This corresponds to the ``config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2797,10 +2724,8 @@ def create_stored_info_type( # there are no flattened fields. if not isinstance(request, dlp.CreateStoredInfoTypeRequest): request = dlp.CreateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent if config is not None: @@ -2828,7 +2753,7 @@ def update_stored_info_type( *, name: str = None, config: dlp.StoredInfoTypeConfig = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -2868,7 +2793,6 @@ def update_stored_info_type( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2898,10 +2822,8 @@ def update_stored_info_type( # there are no flattened fields. if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): request = dlp.UpdateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if config is not None: @@ -2951,7 +2873,6 @@ def get_stored_info_type( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2981,10 +2902,8 @@ def get_stored_info_type( # there are no flattened fields. if not isinstance(request, dlp.GetStoredInfoTypeRequest): request = dlp.GetStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -3050,7 +2969,6 @@ def list_stored_info_types( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3082,10 +3000,8 @@ def list_stored_info_types( # there are no flattened fields. if not isinstance(request, dlp.ListStoredInfoTypesRequest): request = dlp.ListStoredInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -3137,7 +3053,6 @@ def delete_stored_info_type( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3160,10 +3075,8 @@ def delete_stored_info_type( # there are no flattened fields. if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): request = dlp.DeleteStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -3211,7 +3124,6 @@ def hybrid_inspect_dlp_job( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3240,10 +3152,8 @@ def hybrid_inspect_dlp_job( # there are no flattened fields. 
if not isinstance(request, dlp.HybridInspectDlpJobRequest): request = dlp.HybridInspectDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -3282,7 +3192,6 @@ def finish_dlp_job( request (google.cloud.dlp_v2.types.FinishDlpJobRequest): The request object. The request message for finishing a DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3290,7 +3199,6 @@ def finish_dlp_job( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a dlp.FinishDlpJobRequest. # There's no risk of modifying the input as we've already verified diff --git a/google/cloud/dlp_v2/services/dlp_service/pagers.py b/google/cloud/dlp_v2/services/dlp_service/pagers.py index ca5a5b31..2854322b 100644 --- a/google/cloud/dlp_v2/services/dlp_service/pagers.py +++ b/google/cloud/dlp_v2/services/dlp_service/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -373,7 +371,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -501,7 +499,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -629,7 +627,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py index fcd0f4ba..fb655278 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
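The list methods return the pager classes defined in pagers.py, which fetch additional pages lazily as the caller iterates. A usage sketch with a placeholder parent (requires credentials):

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    # Iterating the pager transparently issues ListInspectTemplates requests
    # for each page of results.
    for template in client.list_inspect_templates(parent="organizations/example-org"):
        print(template.name)

    # Pages can also be walked explicitly.
    pager = client.list_inspect_templates(parent="organizations/example-org")
    for page in pager.pages:
        print(len(page.inspect_templates))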
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/google/cloud/dlp_v2/services/dlp_service/transports/base.py index e90ae69b..fa62c860 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,27 +35,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class DlpServiceTransport(abc.ABC): """Abstract transport class for DlpService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dlp.googleapis.com" + def __init__( self, *, - host: str = "dlp.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -65,7 +78,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -79,29 +92,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -112,7 +172,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -126,7 +187,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -140,7 +202,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -154,7 +217,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -168,7 +232,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -192,7 +257,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -206,7 +272,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -220,7 +287,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -244,7 +312,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -258,7 +327,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -272,7 +342,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -297,7 +368,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -311,7 +383,8 @@ def 
_prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -325,7 +398,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -347,7 +421,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -361,7 +436,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -375,7 +451,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -402,7 +479,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -416,7 +494,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -430,7 +509,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -450,95 +530,83 @@ def _prep_wrapped_messages(self, client_info): @property def inspect_content( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.InspectContentRequest], - typing.Union[ - dlp.InspectContentResponse, typing.Awaitable[dlp.InspectContentResponse] - ], + Union[dlp.InspectContentResponse, Awaitable[dlp.InspectContentResponse]], ]: raise NotImplementedError() @property def redact_image( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.RedactImageRequest], - typing.Union[ - dlp.RedactImageResponse, typing.Awaitable[dlp.RedactImageResponse] - ], + Union[dlp.RedactImageResponse, Awaitable[dlp.RedactImageResponse]], ]: raise NotImplementedError() @property def deidentify_content( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.DeidentifyContentRequest], - typing.Union[ - dlp.DeidentifyContentResponse, - typing.Awaitable[dlp.DeidentifyContentResponse], - ], + Union[dlp.DeidentifyContentResponse, Awaitable[dlp.DeidentifyContentResponse]], ]: raise NotImplementedError() @property def reidentify_content( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.ReidentifyContentRequest], - typing.Union[ - dlp.ReidentifyContentResponse, - typing.Awaitable[dlp.ReidentifyContentResponse], - ], + 
Union[dlp.ReidentifyContentResponse, Awaitable[dlp.ReidentifyContentResponse]], ]: raise NotImplementedError() @property def list_info_types( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.ListInfoTypesRequest], - typing.Union[ - dlp.ListInfoTypesResponse, typing.Awaitable[dlp.ListInfoTypesResponse] - ], + Union[dlp.ListInfoTypesResponse, Awaitable[dlp.ListInfoTypesResponse]], ]: raise NotImplementedError() @property def create_inspect_template( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.CreateInspectTemplateRequest], - typing.Union[dlp.InspectTemplate, typing.Awaitable[dlp.InspectTemplate]], + Union[dlp.InspectTemplate, Awaitable[dlp.InspectTemplate]], ]: raise NotImplementedError() @property def update_inspect_template( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.UpdateInspectTemplateRequest], - typing.Union[dlp.InspectTemplate, typing.Awaitable[dlp.InspectTemplate]], + Union[dlp.InspectTemplate, Awaitable[dlp.InspectTemplate]], ]: raise NotImplementedError() @property def get_inspect_template( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.GetInspectTemplateRequest], - typing.Union[dlp.InspectTemplate, typing.Awaitable[dlp.InspectTemplate]], + Union[dlp.InspectTemplate, Awaitable[dlp.InspectTemplate]], ]: raise NotImplementedError() @property def list_inspect_templates( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.ListInspectTemplatesRequest], - typing.Union[ + Union[ dlp.ListInspectTemplatesResponse, - typing.Awaitable[dlp.ListInspectTemplatesResponse], + Awaitable[dlp.ListInspectTemplatesResponse], ], ]: raise NotImplementedError() @@ -546,47 +614,47 @@ def list_inspect_templates( @property def delete_inspect_template( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.DeleteInspectTemplateRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def create_deidentify_template( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.CreateDeidentifyTemplateRequest], - typing.Union[dlp.DeidentifyTemplate, typing.Awaitable[dlp.DeidentifyTemplate]], + Union[dlp.DeidentifyTemplate, Awaitable[dlp.DeidentifyTemplate]], ]: raise NotImplementedError() @property def update_deidentify_template( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.UpdateDeidentifyTemplateRequest], - typing.Union[dlp.DeidentifyTemplate, typing.Awaitable[dlp.DeidentifyTemplate]], + Union[dlp.DeidentifyTemplate, Awaitable[dlp.DeidentifyTemplate]], ]: raise NotImplementedError() @property def get_deidentify_template( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.GetDeidentifyTemplateRequest], - typing.Union[dlp.DeidentifyTemplate, typing.Awaitable[dlp.DeidentifyTemplate]], + Union[dlp.DeidentifyTemplate, Awaitable[dlp.DeidentifyTemplate]], ]: raise NotImplementedError() @property def list_deidentify_templates( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.ListDeidentifyTemplatesRequest], - typing.Union[ + Union[ dlp.ListDeidentifyTemplatesResponse, - typing.Awaitable[dlp.ListDeidentifyTemplatesResponse], + Awaitable[dlp.ListDeidentifyTemplatesResponse], ], ]: raise NotImplementedError() @@ -594,160 +662,142 @@ def list_deidentify_templates( @property def delete_deidentify_template( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.DeleteDeidentifyTemplateRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def create_job_trigger( self, - ) -> 
typing.Callable[ - [dlp.CreateJobTriggerRequest], - typing.Union[dlp.JobTrigger, typing.Awaitable[dlp.JobTrigger]], + ) -> Callable[ + [dlp.CreateJobTriggerRequest], Union[dlp.JobTrigger, Awaitable[dlp.JobTrigger]] ]: raise NotImplementedError() @property def update_job_trigger( self, - ) -> typing.Callable[ - [dlp.UpdateJobTriggerRequest], - typing.Union[dlp.JobTrigger, typing.Awaitable[dlp.JobTrigger]], + ) -> Callable[ + [dlp.UpdateJobTriggerRequest], Union[dlp.JobTrigger, Awaitable[dlp.JobTrigger]] ]: raise NotImplementedError() @property def hybrid_inspect_job_trigger( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.HybridInspectJobTriggerRequest], - typing.Union[ - dlp.HybridInspectResponse, typing.Awaitable[dlp.HybridInspectResponse] - ], + Union[dlp.HybridInspectResponse, Awaitable[dlp.HybridInspectResponse]], ]: raise NotImplementedError() @property def get_job_trigger( self, - ) -> typing.Callable[ - [dlp.GetJobTriggerRequest], - typing.Union[dlp.JobTrigger, typing.Awaitable[dlp.JobTrigger]], + ) -> Callable[ + [dlp.GetJobTriggerRequest], Union[dlp.JobTrigger, Awaitable[dlp.JobTrigger]] ]: raise NotImplementedError() @property def list_job_triggers( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.ListJobTriggersRequest], - typing.Union[ - dlp.ListJobTriggersResponse, typing.Awaitable[dlp.ListJobTriggersResponse] - ], + Union[dlp.ListJobTriggersResponse, Awaitable[dlp.ListJobTriggersResponse]], ]: raise NotImplementedError() @property def delete_job_trigger( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.DeleteJobTriggerRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def activate_job_trigger( self, - ) -> typing.Callable[ - [dlp.ActivateJobTriggerRequest], - typing.Union[dlp.DlpJob, typing.Awaitable[dlp.DlpJob]], + ) -> Callable[ + [dlp.ActivateJobTriggerRequest], Union[dlp.DlpJob, Awaitable[dlp.DlpJob]] ]: raise NotImplementedError() @property def create_dlp_job( self, - ) -> typing.Callable[ - [dlp.CreateDlpJobRequest], - typing.Union[dlp.DlpJob, typing.Awaitable[dlp.DlpJob]], - ]: + ) -> Callable[[dlp.CreateDlpJobRequest], Union[dlp.DlpJob, Awaitable[dlp.DlpJob]]]: raise NotImplementedError() @property def list_dlp_jobs( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.ListDlpJobsRequest], - typing.Union[ - dlp.ListDlpJobsResponse, typing.Awaitable[dlp.ListDlpJobsResponse] - ], + Union[dlp.ListDlpJobsResponse, Awaitable[dlp.ListDlpJobsResponse]], ]: raise NotImplementedError() @property def get_dlp_job( self, - ) -> typing.Callable[ - [dlp.GetDlpJobRequest], typing.Union[dlp.DlpJob, typing.Awaitable[dlp.DlpJob]] - ]: + ) -> Callable[[dlp.GetDlpJobRequest], Union[dlp.DlpJob, Awaitable[dlp.DlpJob]]]: raise NotImplementedError() @property def delete_dlp_job( self, - ) -> typing.Callable[ - [dlp.DeleteDlpJobRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [dlp.DeleteDlpJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def cancel_dlp_job( self, - ) -> typing.Callable[ - [dlp.CancelDlpJobRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [dlp.CancelDlpJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def create_stored_info_type( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.CreateStoredInfoTypeRequest], - typing.Union[dlp.StoredInfoType, 
typing.Awaitable[dlp.StoredInfoType]], + Union[dlp.StoredInfoType, Awaitable[dlp.StoredInfoType]], ]: raise NotImplementedError() @property def update_stored_info_type( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.UpdateStoredInfoTypeRequest], - typing.Union[dlp.StoredInfoType, typing.Awaitable[dlp.StoredInfoType]], + Union[dlp.StoredInfoType, Awaitable[dlp.StoredInfoType]], ]: raise NotImplementedError() @property def get_stored_info_type( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.GetStoredInfoTypeRequest], - typing.Union[dlp.StoredInfoType, typing.Awaitable[dlp.StoredInfoType]], + Union[dlp.StoredInfoType, Awaitable[dlp.StoredInfoType]], ]: raise NotImplementedError() @property def list_stored_info_types( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.ListStoredInfoTypesRequest], - typing.Union[ - dlp.ListStoredInfoTypesResponse, - typing.Awaitable[dlp.ListStoredInfoTypesResponse], + Union[ + dlp.ListStoredInfoTypesResponse, Awaitable[dlp.ListStoredInfoTypesResponse] ], ]: raise NotImplementedError() @@ -755,29 +805,26 @@ def list_stored_info_types( @property def delete_stored_info_type( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.DeleteStoredInfoTypeRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def hybrid_inspect_dlp_job( self, - ) -> typing.Callable[ + ) -> Callable[ [dlp.HybridInspectDlpJobRequest], - typing.Union[ - dlp.HybridInspectResponse, typing.Awaitable[dlp.HybridInspectResponse] - ], + Union[dlp.HybridInspectResponse, Awaitable[dlp.HybridInspectResponse]], ]: raise NotImplementedError() @property def finish_dlp_job( self, - ) -> typing.Callable[ - [dlp.FinishDlpJobRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [dlp.FinishDlpJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py index a6fd0c7a..32470d26 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
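The retry settings wired up in _prep_wrapped_messages above amount to an exponential backoff that retries only DeadlineExceeded and ServiceUnavailable errors and gives up after 300 seconds overall. Built by hand, the equivalent policy looks roughly like this; the initial delay is assumed to be 0.1s, since it is not visible in the hunks above:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    dlp_retry = retries.Retry(
        initial=0.1,      # assumed; not shown in the hunks above
        maximum=60.0,     # cap each backoff at 60 seconds
        multiplier=1.3,   # grow the delay by 1.3x per attempt
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=300.0,   # stop retrying after 5 minutes total
    )

A Retry object like this can also be passed explicitly as the retry= argument of any client method to override the wrapped default.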
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO @@ -59,7 +56,7 @@ def __init__( self, *, host: str = "dlp.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -73,7 +70,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -183,7 +181,7 @@ def __init__( def create_channel( cls, host: str = "dlp.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -214,13 +212,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -508,7 +508,7 @@ def list_inspect_templates( @property def delete_inspect_template( self, - ) -> Callable[[dlp.DeleteInspectTemplateRequest], empty.Empty]: + ) -> Callable[[dlp.DeleteInspectTemplateRequest], empty_pb2.Empty]: r"""Return a callable for the delete inspect template method over gRPC. Deletes an InspectTemplate. @@ -529,7 +529,7 @@ def delete_inspect_template( self._stubs["delete_inspect_template"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_inspect_template"] @@ -652,7 +652,7 @@ def list_deidentify_templates( @property def delete_deidentify_template( self, - ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], empty.Empty]: + ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], empty_pb2.Empty]: r"""Return a callable for the delete deidentify template method over gRPC. Deletes a DeidentifyTemplate. 
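create_channel now defers to _get_self_signed_jwt_kwargs, which chooses keyword arguments based on the installed google-api-core version: 1.26.0 and newer understand default_scopes/default_host (enabling the self-signed JWT flow), while older versions only receive scopes. A standalone sketch of that gating; the version strings passed at the end are illustrative:

    from typing import Optional, Sequence
    import packaging.version

    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
    DEFAULT_HOST = "dlp.googleapis.com"

    def channel_kwargs(api_core_version: str, scopes: Optional[Sequence[str]]) -> dict:
        # google-api-core >= 1.26.0 accepts default_scopes/default_host.
        if packaging.version.parse(api_core_version) >= packaging.version.parse("1.26.0"):
            return {
                "default_scopes": AUTH_SCOPES,
                "scopes": scopes,
                "default_host": DEFAULT_HOST,
            }
        return {"scopes": scopes or AUTH_SCOPES}

    print(channel_kwargs("1.26.3", None))
    print(channel_kwargs("1.22.0", None))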
@@ -673,7 +673,7 @@ def delete_deidentify_template( self._stubs["delete_deidentify_template"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate", request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_deidentify_template"] @@ -825,7 +825,7 @@ def list_job_triggers( @property def delete_job_trigger( self, - ) -> Callable[[dlp.DeleteJobTriggerRequest], empty.Empty]: + ) -> Callable[[dlp.DeleteJobTriggerRequest], empty_pb2.Empty]: r"""Return a callable for the delete job trigger method over gRPC. Deletes a job trigger. @@ -846,7 +846,7 @@ def delete_job_trigger( self._stubs["delete_job_trigger"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_job_trigger"] @@ -968,7 +968,7 @@ def get_dlp_job(self) -> Callable[[dlp.GetDlpJobRequest], dlp.DlpJob]: return self._stubs["get_dlp_job"] @property - def delete_dlp_job(self) -> Callable[[dlp.DeleteDlpJobRequest], empty.Empty]: + def delete_dlp_job(self) -> Callable[[dlp.DeleteDlpJobRequest], empty_pb2.Empty]: r"""Return a callable for the delete dlp job method over gRPC. Deletes a long-running DlpJob. This method indicates @@ -992,12 +992,12 @@ def delete_dlp_job(self) -> Callable[[dlp.DeleteDlpJobRequest], empty.Empty]: self._stubs["delete_dlp_job"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_dlp_job"] @property - def cancel_dlp_job(self) -> Callable[[dlp.CancelDlpJobRequest], empty.Empty]: + def cancel_dlp_job(self) -> Callable[[dlp.CancelDlpJobRequest], empty_pb2.Empty]: r"""Return a callable for the cancel dlp job method over gRPC. Starts asynchronous cancellation on a long-running @@ -1021,7 +1021,7 @@ def cancel_dlp_job(self) -> Callable[[dlp.CancelDlpJobRequest], empty.Empty]: self._stubs["cancel_dlp_job"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/CancelDlpJob", request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["cancel_dlp_job"] @@ -1143,7 +1143,7 @@ def list_stored_info_types( @property def delete_stored_info_type( self, - ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], empty.Empty]: + ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], empty_pb2.Empty]: r"""Return a callable for the delete stored info type method over gRPC. Deletes a stored infoType. 
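Each stub property above creates its gRPC callable once and caches it in self._stubs, so later accesses reuse the same object. A self-contained sketch of the pattern with a stand-in channel; the real transport also passes request_serializer and response_deserializer, omitted here:

    class _FakeChannel:
        def unary_unary(self, method_path, **kwargs):
            # Stand-in for grpc.Channel.unary_unary so the example runs offline.
            return "callable for " + method_path

    class StubCachingTransport:
        def __init__(self, channel):
            self.grpc_channel = channel
            self._stubs = {}

        @property
        def delete_dlp_job(self):
            # Create the callable lazily on first access, then reuse it.
            if "delete_dlp_job" not in self._stubs:
                self._stubs["delete_dlp_job"] = self.grpc_channel.unary_unary(
                    "/google.privacy.dlp.v2.DlpService/DeleteDlpJob",
                )
            return self._stubs["delete_dlp_job"]

    transport = StubCachingTransport(_FakeChannel())
    assert transport.delete_dlp_job is transport.delete_dlp_job  # built once, reused

Because the property never tears the stub down, a transport instance stays bound to its channel for its lifetime.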
@@ -1164,7 +1164,7 @@ def delete_stored_info_type( self._stubs["delete_stored_info_type"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_stored_info_type"] @@ -1201,7 +1201,7 @@ def hybrid_inspect_dlp_job( return self._stubs["hybrid_inspect_dlp_job"] @property - def finish_dlp_job(self) -> Callable[[dlp.FinishDlpJobRequest], empty.Empty]: + def finish_dlp_job(self) -> Callable[[dlp.FinishDlpJobRequest], empty_pb2.Empty]: r"""Return a callable for the finish dlp job method over gRPC. Finish a running hybrid DlpJob. Triggers the @@ -1225,7 +1225,7 @@ def finish_dlp_job(self) -> Callable[[dlp.FinishDlpJobRequest], empty.Empty]: self._stubs["finish_dlp_job"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/FinishDlpJob", request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["finish_dlp_job"] diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py index ff938c48..eeb9d919 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO from .grpc import DlpServiceGrpcTransport @@ -62,7 +59,7 @@ class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): def create_channel( cls, host: str = "dlp.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -89,13 +86,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -103,7 +102,7 @@ def __init__( self, *, host: str = "dlp.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -117,7 +116,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +175,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -518,7 +517,7 @@ def list_inspect_templates( @property def delete_inspect_template( self, - ) -> Callable[[dlp.DeleteInspectTemplateRequest], Awaitable[empty.Empty]]: + ) -> Callable[[dlp.DeleteInspectTemplateRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete inspect template method over gRPC. Deletes an InspectTemplate. @@ -539,7 +538,7 @@ def delete_inspect_template( self._stubs["delete_inspect_template"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_inspect_template"] @@ -669,7 +668,7 @@ def list_deidentify_templates( @property def delete_deidentify_template( self, - ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], Awaitable[empty.Empty]]: + ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete deidentify template method over gRPC. Deletes a DeidentifyTemplate. @@ -690,7 +689,7 @@ def delete_deidentify_template( self._stubs["delete_deidentify_template"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate", request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_deidentify_template"] @@ -846,7 +845,7 @@ def list_job_triggers( @property def delete_job_trigger( self, - ) -> Callable[[dlp.DeleteJobTriggerRequest], Awaitable[empty.Empty]]: + ) -> Callable[[dlp.DeleteJobTriggerRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete job trigger method over gRPC. Deletes a job trigger. 
@@ -867,7 +866,7 @@ def delete_job_trigger( self._stubs["delete_job_trigger"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_job_trigger"] @@ -993,7 +992,7 @@ def get_dlp_job(self) -> Callable[[dlp.GetDlpJobRequest], Awaitable[dlp.DlpJob]] @property def delete_dlp_job( self, - ) -> Callable[[dlp.DeleteDlpJobRequest], Awaitable[empty.Empty]]: + ) -> Callable[[dlp.DeleteDlpJobRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete dlp job method over gRPC. Deletes a long-running DlpJob. This method indicates @@ -1017,14 +1016,14 @@ def delete_dlp_job( self._stubs["delete_dlp_job"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_dlp_job"] @property def cancel_dlp_job( self, - ) -> Callable[[dlp.CancelDlpJobRequest], Awaitable[empty.Empty]]: + ) -> Callable[[dlp.CancelDlpJobRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the cancel dlp job method over gRPC. Starts asynchronous cancellation on a long-running @@ -1048,7 +1047,7 @@ def cancel_dlp_job( self._stubs["cancel_dlp_job"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/CancelDlpJob", request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["cancel_dlp_job"] @@ -1172,7 +1171,7 @@ def list_stored_info_types( @property def delete_stored_info_type( self, - ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], Awaitable[empty.Empty]]: + ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete stored info type method over gRPC. Deletes a stored infoType. @@ -1193,7 +1192,7 @@ def delete_stored_info_type( self._stubs["delete_stored_info_type"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_stored_info_type"] @@ -1234,7 +1233,7 @@ def hybrid_inspect_dlp_job( @property def finish_dlp_job( self, - ) -> Callable[[dlp.FinishDlpJobRequest], Awaitable[empty.Empty]]: + ) -> Callable[[dlp.FinishDlpJobRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the finish dlp job method over gRPC. Finish a running hybrid DlpJob. 
Triggers the @@ -1258,7 +1257,7 @@ def finish_dlp_job( self._stubs["finish_dlp_job"] = self.grpc_channel.unary_unary( "/google.privacy.dlp.v2.DlpService/FinishDlpJob", request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["finish_dlp_job"] diff --git a/google/cloud/dlp_v2/types/__init__.py b/google/cloud/dlp_v2/types/__init__.py index 8070ff45..66c89af5 100644 --- a/google/cloud/dlp_v2/types/__init__.py +++ b/google/cloud/dlp_v2/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .dlp import ( Action, ActivateJobTriggerRequest, diff --git a/google/cloud/dlp_v2/types/dlp.py b/google/cloud/dlp_v2/types/dlp.py index 921bc60c..f4f6f873 100644 --- a/google/cloud/dlp_v2/types/dlp.py +++ b/google/cloud/dlp_v2/types/dlp.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,19 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.dlp_v2.types import storage -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as gr_status # type: ignore -from google.type import date_pb2 as gt_date # type: ignore -from google.type import dayofweek_pb2 as dayofweek # type: ignore -from google.type import timeofday_pb2 as timeofday # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore __protobuf__ = proto.module( @@ -230,7 +227,6 @@ class StoredInfoTypeState(proto.Enum): class ExcludeInfoTypes(proto.Message): r"""List of exclude infoTypes. 
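Illustrative note, not part of the generated patch: the transport hunks above replace the plain ``scopes=scopes`` argument to ``grpc_helpers.create_channel`` with keyword arguments produced by ``cls._get_self_signed_jwt_kwargs(host, scopes)``, which is the piece that enables the self-signed JWT flow for service accounts. A minimal sketch of exercising the updated classmethod, assuming Application Default Credentials are available; the exact kwargs the helper returns are internal to the base transport, and all values shown are placeholders.

import google.auth
from google.cloud.dlp_v2.services.dlp_service.transports.grpc import (
    DlpServiceGrpcTransport,
)

# Credentials resolved via ADC.  Passing scopes=None lets the transport
# decide (via _get_self_signed_jwt_kwargs) whether default OAuth scopes or a
# self-signed JWT audience should be used for these credentials.
credentials, _ = google.auth.default()
channel = DlpServiceGrpcTransport.create_channel(
    host="dlp.googleapis.com",
    credentials=credentials,
    scopes=None,
)
transport = DlpServiceGrpcTransport(channel=channel)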
- Attributes: info_types (Sequence[google.cloud.dlp_v2.types.InfoType]): InfoType list in ExclusionRule rule drops a finding when it @@ -270,15 +266,12 @@ class ExclusionRule(proto.Message): oneof="type", message=storage.CustomInfoType.Dictionary, ) - regex = proto.Field( proto.MESSAGE, number=2, oneof="type", message=storage.CustomInfoType.Regex, ) - exclude_info_types = proto.Field( proto.MESSAGE, number=3, oneof="type", message="ExcludeInfoTypes", ) - matching_type = proto.Field(proto.ENUM, number=4, enum="MatchingType",) @@ -299,7 +292,6 @@ class InspectionRule(proto.Message): oneof="type", message=storage.CustomInfoType.DetectionRule.HotwordRule, ) - exclusion_rule = proto.Field( proto.MESSAGE, number=2, oneof="type", message="ExclusionRule", ) @@ -320,7 +312,6 @@ class InspectionRuleSet(proto.Message): """ info_types = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.InfoType,) - rules = proto.RepeatedField(proto.MESSAGE, number=2, message="InspectionRule",) @@ -377,7 +368,6 @@ class InspectConfig(proto.Message): class FindingLimits(proto.Message): r"""Configuration to control the number of findings returned. - Attributes: max_findings_per_item (int): Max number of findings that will be returned for each item @@ -409,13 +399,10 @@ class InfoTypeLimit(proto.Message): """ info_type = proto.Field(proto.MESSAGE, number=1, message=storage.InfoType,) + max_findings = proto.Field(proto.INT32, number=2,) - max_findings = proto.Field(proto.INT32, number=2) - - max_findings_per_item = proto.Field(proto.INT32, number=1) - - max_findings_per_request = proto.Field(proto.INT32, number=2) - + max_findings_per_item = proto.Field(proto.INT32, number=1,) + max_findings_per_request = proto.Field(proto.INT32, number=2,) max_findings_per_info_type = proto.RepeatedField( proto.MESSAGE, number=3, @@ -423,21 +410,14 @@ class InfoTypeLimit(proto.Message): ) info_types = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.InfoType,) - min_likelihood = proto.Field(proto.ENUM, number=2, enum=storage.Likelihood,) - limits = proto.Field(proto.MESSAGE, number=3, message=FindingLimits,) - - include_quote = proto.Field(proto.BOOL, number=4) - - exclude_info_types = proto.Field(proto.BOOL, number=5) - + include_quote = proto.Field(proto.BOOL, number=4,) + exclude_info_types = proto.Field(proto.BOOL, number=5,) custom_info_types = proto.RepeatedField( proto.MESSAGE, number=6, message=storage.CustomInfoType, ) - content_options = proto.RepeatedField(proto.ENUM, number=8, enum="ContentOption",) - rule_set = proto.RepeatedField( proto.MESSAGE, number=10, message="InspectionRuleSet", ) @@ -445,7 +425,6 @@ class InfoTypeLimit(proto.Message): class ByteContentItem(proto.Message): r"""Container for bytes to inspect or redact. - Attributes: type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): The type of data stored in the bytes string. Default will be @@ -470,13 +449,11 @@ class BytesType(proto.Enum): TSV = 13 type_ = proto.Field(proto.ENUM, number=1, enum=BytesType,) - - data = proto.Field(proto.BYTES, number=2) + data = proto.Field(proto.BYTES, number=2,) class ContentItem(proto.Message): r"""Container structure for the content to inspect. - Attributes: value (str): String data to inspect or redact. @@ -489,10 +466,8 @@ class ContentItem(proto.Message): ``data``. 
""" - value = proto.Field(proto.STRING, number=3, oneof="data_item") - + value = proto.Field(proto.STRING, number=3, oneof="data_item",) table = proto.Field(proto.MESSAGE, number=4, oneof="data_item", message="Table",) - byte_item = proto.Field( proto.MESSAGE, number=5, oneof="data_item", message="ByteContentItem", ) @@ -513,7 +488,6 @@ class Table(proto.Message): class Row(proto.Message): r"""Values of the row. - Attributes: values (Sequence[google.cloud.dlp_v2.types.Value]): Individual cells. @@ -522,13 +496,11 @@ class Row(proto.Message): values = proto.RepeatedField(proto.MESSAGE, number=1, message="Value",) headers = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.FieldId,) - rows = proto.RepeatedField(proto.MESSAGE, number=2, message=Row,) class InspectResult(proto.Message): r"""All the findings for a single scanned item. - Attributes: findings (Sequence[google.cloud.dlp_v2.types.Finding]): List of findings for an item. @@ -545,13 +517,11 @@ class InspectResult(proto.Message): """ findings = proto.RepeatedField(proto.MESSAGE, number=1, message="Finding",) - - findings_truncated = proto.Field(proto.BOOL, number=2) + findings_truncated = proto.Field(proto.BOOL, number=2,) class Finding(proto.Message): r"""Represents a piece of potentially sensitive content. - Attributes: name (str): Resource name in format @@ -608,36 +578,24 @@ class Finding(proto.Message): The job that stored the finding. """ - name = proto.Field(proto.STRING, number=14) - - quote = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=14,) + quote = proto.Field(proto.STRING, number=1,) info_type = proto.Field(proto.MESSAGE, number=2, message=storage.InfoType,) - likelihood = proto.Field(proto.ENUM, number=3, enum=storage.Likelihood,) - location = proto.Field(proto.MESSAGE, number=4, message="Location",) - - create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) - + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) quote_info = proto.Field(proto.MESSAGE, number=7, message="QuoteInfo",) - - resource_name = proto.Field(proto.STRING, number=8) - - trigger_name = proto.Field(proto.STRING, number=9) - - labels = proto.MapField(proto.STRING, proto.STRING, number=10) - + resource_name = proto.Field(proto.STRING, number=8,) + trigger_name = proto.Field(proto.STRING, number=9,) + labels = proto.MapField(proto.STRING, proto.STRING, number=10,) job_create_time = proto.Field( - proto.MESSAGE, number=11, message=timestamp.Timestamp, + proto.MESSAGE, number=11, message=timestamp_pb2.Timestamp, ) - - job_name = proto.Field(proto.STRING, number=13) + job_name = proto.Field(proto.STRING, number=13,) class Location(proto.Message): r"""Specifies the location of the finding. - Attributes: byte_range (google.cloud.dlp_v2.types.Range): Zero-based byte offsets delimiting the @@ -661,13 +619,10 @@ class Location(proto.Message): """ byte_range = proto.Field(proto.MESSAGE, number=1, message="Range",) - codepoint_range = proto.Field(proto.MESSAGE, number=2, message="Range",) - content_locations = proto.RepeatedField( proto.MESSAGE, number=7, message="ContentLocation", ) - container = proto.Field(proto.MESSAGE, number=8, message="Container",) @@ -708,34 +663,27 @@ class ContentLocation(proto.Message): ("generation" for Google Cloud Storage). 
""" - container_name = proto.Field(proto.STRING, number=1) - + container_name = proto.Field(proto.STRING, number=1,) record_location = proto.Field( proto.MESSAGE, number=2, oneof="location", message="RecordLocation", ) - image_location = proto.Field( proto.MESSAGE, number=3, oneof="location", message="ImageLocation", ) - document_location = proto.Field( proto.MESSAGE, number=5, oneof="location", message="DocumentLocation", ) - metadata_location = proto.Field( proto.MESSAGE, number=8, oneof="location", message="MetadataLocation", ) - container_timestamp = proto.Field( - proto.MESSAGE, number=6, message=timestamp.Timestamp, + proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, ) - - container_version = proto.Field(proto.STRING, number=7) + container_version = proto.Field(proto.STRING, number=7,) class MetadataLocation(proto.Message): r"""Metadata Location - Attributes: type_ (google.cloud.dlp_v2.types.MetadataType): Type of metadata containing the finding. @@ -744,7 +692,6 @@ class MetadataLocation(proto.Message): """ type_ = proto.Field(proto.ENUM, number=1, enum="MetadataType",) - storage_label = proto.Field( proto.MESSAGE, number=3, oneof="label", message="StorageMetadataLabel", ) @@ -759,24 +706,22 @@ class StorageMetadataLabel(proto.Message): """ - key = proto.Field(proto.STRING, number=1) + key = proto.Field(proto.STRING, number=1,) class DocumentLocation(proto.Message): r"""Location of a finding within a document. - Attributes: file_offset (int): Offset of the line, from the beginning of the file, where the finding is located. """ - file_offset = proto.Field(proto.INT64, number=1) + file_offset = proto.Field(proto.INT64, number=1,) class RecordLocation(proto.Message): r"""Location of a finding within a row or record. - Attributes: record_key (google.cloud.dlp_v2.types.RecordKey): Key of the finding. @@ -787,15 +732,12 @@ class RecordLocation(proto.Message): """ record_key = proto.Field(proto.MESSAGE, number=1, message=storage.RecordKey,) - field_id = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) - table_location = proto.Field(proto.MESSAGE, number=3, message="TableLocation",) class TableLocation(proto.Message): r"""Location of a finding within a table. - Attributes: row_index (int): The zero-based index of the row where the finding is @@ -807,7 +749,7 @@ class TableLocation(proto.Message): those columns will be stored inside of Finding. """ - row_index = proto.Field(proto.INT64, number=1) + row_index = proto.Field(proto.INT64, number=1,) class Container(proto.Message): @@ -855,24 +797,17 @@ class Container(proto.Message): ("generation" for Google Cloud Storage). 
""" - type_ = proto.Field(proto.STRING, number=1) - - project_id = proto.Field(proto.STRING, number=2) - - full_path = proto.Field(proto.STRING, number=3) - - root_path = proto.Field(proto.STRING, number=4) - - relative_path = proto.Field(proto.STRING, number=5) - - update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) - - version = proto.Field(proto.STRING, number=7) + type_ = proto.Field(proto.STRING, number=1,) + project_id = proto.Field(proto.STRING, number=2,) + full_path = proto.Field(proto.STRING, number=3,) + root_path = proto.Field(proto.STRING, number=4,) + relative_path = proto.Field(proto.STRING, number=5,) + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) + version = proto.Field(proto.STRING, number=7,) class Range(proto.Message): r"""Generic half-open interval [start, end) - Attributes: start (int): Index of the first character of the range @@ -882,14 +817,12 @@ class Range(proto.Message): (exclusive). """ - start = proto.Field(proto.INT64, number=1) - - end = proto.Field(proto.INT64, number=2) + start = proto.Field(proto.INT64, number=1,) + end = proto.Field(proto.INT64, number=2,) class ImageLocation(proto.Message): r"""Location of the finding within an image. - Attributes: bounding_boxes (Sequence[google.cloud.dlp_v2.types.BoundingBox]): Bounding boxes locating the pixels within the @@ -903,7 +836,6 @@ class ImageLocation(proto.Message): class BoundingBox(proto.Message): r"""Bounding box encompassing detected text within an image. - Attributes: top (int): Top coordinate of the bounding box. (0,0) is @@ -917,13 +849,10 @@ class BoundingBox(proto.Message): Height of the bounding box in pixels. """ - top = proto.Field(proto.INT32, number=1) - - left = proto.Field(proto.INT32, number=2) - - width = proto.Field(proto.INT32, number=3) - - height = proto.Field(proto.INT32, number=4) + top = proto.Field(proto.INT32, number=1,) + left = proto.Field(proto.INT32, number=2,) + width = proto.Field(proto.INT32, number=3,) + height = proto.Field(proto.INT32, number=4,) class RedactImageRequest(proto.Message): @@ -988,29 +917,21 @@ class ImageRedactionConfig(proto.Message): info_type = proto.Field( proto.MESSAGE, number=1, oneof="target", message=storage.InfoType, ) - - redact_all_text = proto.Field(proto.BOOL, number=2, oneof="target") - + redact_all_text = proto.Field(proto.BOOL, number=2, oneof="target",) redaction_color = proto.Field(proto.MESSAGE, number=3, message="Color",) - parent = proto.Field(proto.STRING, number=1) - - location_id = proto.Field(proto.STRING, number=8) - + parent = proto.Field(proto.STRING, number=1,) + location_id = proto.Field(proto.STRING, number=8,) inspect_config = proto.Field(proto.MESSAGE, number=2, message="InspectConfig",) - image_redaction_configs = proto.RepeatedField( proto.MESSAGE, number=5, message=ImageRedactionConfig, ) - - include_findings = proto.Field(proto.BOOL, number=6) - + include_findings = proto.Field(proto.BOOL, number=6,) byte_item = proto.Field(proto.MESSAGE, number=7, message="ByteContentItem",) class Color(proto.Message): r"""Represents a color in the RGB color space. - Attributes: red (float): The amount of red in the color as a value in the interval @@ -1023,16 +944,13 @@ class Color(proto.Message): [0, 1]. 
""" - red = proto.Field(proto.FLOAT, number=1) - - green = proto.Field(proto.FLOAT, number=2) - - blue = proto.Field(proto.FLOAT, number=3) + red = proto.Field(proto.FLOAT, number=1,) + green = proto.Field(proto.FLOAT, number=2,) + blue = proto.Field(proto.FLOAT, number=3,) class RedactImageResponse(proto.Message): r"""Results of redacting an image. - Attributes: redacted_image (bytes): The redacted image. The type will be the same @@ -1046,16 +964,13 @@ class RedactImageResponse(proto.Message): is true. """ - redacted_image = proto.Field(proto.BYTES, number=1) - - extracted_text = proto.Field(proto.STRING, number=2) - + redacted_image = proto.Field(proto.BYTES, number=1,) + extracted_text = proto.Field(proto.STRING, number=2,) inspect_result = proto.Field(proto.MESSAGE, number=3, message="InspectResult",) class DeidentifyContentRequest(proto.Message): r"""Request to de-identify a list of items. - Attributes: parent (str): Parent resource name. @@ -1105,26 +1020,19 @@ class DeidentifyContentRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) deidentify_config = proto.Field( proto.MESSAGE, number=2, message="DeidentifyConfig", ) - inspect_config = proto.Field(proto.MESSAGE, number=3, message="InspectConfig",) - item = proto.Field(proto.MESSAGE, number=4, message="ContentItem",) - - inspect_template_name = proto.Field(proto.STRING, number=5) - - deidentify_template_name = proto.Field(proto.STRING, number=6) - - location_id = proto.Field(proto.STRING, number=7) + inspect_template_name = proto.Field(proto.STRING, number=5,) + deidentify_template_name = proto.Field(proto.STRING, number=6,) + location_id = proto.Field(proto.STRING, number=7,) class DeidentifyContentResponse(proto.Message): r"""Results of de-identifying a ContentItem. - Attributes: item (google.cloud.dlp_v2.types.ContentItem): The de-identified item. @@ -1133,13 +1041,11 @@ class DeidentifyContentResponse(proto.Message): """ item = proto.Field(proto.MESSAGE, number=1, message="ContentItem",) - overview = proto.Field(proto.MESSAGE, number=2, message="TransformationOverview",) class ReidentifyContentRequest(proto.Message): r"""Request to re-identify an item. - Attributes: parent (str): Required. Parent resource name. @@ -1199,26 +1105,19 @@ class ReidentifyContentRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) reidentify_config = proto.Field( proto.MESSAGE, number=2, message="DeidentifyConfig", ) - inspect_config = proto.Field(proto.MESSAGE, number=3, message="InspectConfig",) - item = proto.Field(proto.MESSAGE, number=4, message="ContentItem",) - - inspect_template_name = proto.Field(proto.STRING, number=5) - - reidentify_template_name = proto.Field(proto.STRING, number=6) - - location_id = proto.Field(proto.STRING, number=7) + inspect_template_name = proto.Field(proto.STRING, number=5,) + reidentify_template_name = proto.Field(proto.STRING, number=6,) + location_id = proto.Field(proto.STRING, number=7,) class ReidentifyContentResponse(proto.Message): r"""Results of re-identifying a item. - Attributes: item (google.cloud.dlp_v2.types.ContentItem): The re-identified item. 
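Illustrative sketch, not part of the patch: the request and response messages above are proto-plus classes that can be built with keyword arguments and passed to the generated client. The resource names below are placeholders, and the example assumes a de-identify template already exists.

from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
request = dlp_v2.DeidentifyContentRequest(
    parent="projects/my-project/locations/global",  # placeholder
    item=dlp_v2.ContentItem(value="My email is test@example.com"),
    deidentify_template_name=(
        "projects/my-project/deidentifyTemplates/my-template"  # placeholder
    ),
)
response = client.deidentify_content(request=request)
print(response.item.value)  # de-identified text
print(response.overview)    # TransformationOverview for the request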
@@ -1227,7 +1126,6 @@ class ReidentifyContentResponse(proto.Message): """ item = proto.Field(proto.MESSAGE, number=1, message="ContentItem",) - overview = proto.Field(proto.MESSAGE, number=2, message="TransformationOverview",) @@ -1272,20 +1170,15 @@ class InspectContentRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) inspect_config = proto.Field(proto.MESSAGE, number=2, message="InspectConfig",) - item = proto.Field(proto.MESSAGE, number=3, message="ContentItem",) - - inspect_template_name = proto.Field(proto.STRING, number=4) - - location_id = proto.Field(proto.STRING, number=5) + inspect_template_name = proto.Field(proto.STRING, number=4,) + location_id = proto.Field(proto.STRING, number=5,) class InspectContentResponse(proto.Message): r"""Results of inspecting an item. - Attributes: result (google.cloud.dlp_v2.types.InspectResult): The findings. @@ -1296,7 +1189,6 @@ class InspectContentResponse(proto.Message): class OutputStorageConfig(proto.Message): r"""Cloud repository for storing output. - Attributes: table (google.cloud.dlp_v2.types.BigQueryTable): Store findings in an existing table or a new table in an @@ -1343,13 +1235,11 @@ class OutputSchema(proto.Enum): table = proto.Field( proto.MESSAGE, number=1, oneof="type", message=storage.BigQueryTable, ) - output_schema = proto.Field(proto.ENUM, number=3, enum=OutputSchema,) class InfoTypeStats(proto.Message): r"""Statistics regarding a specific InfoType. - Attributes: info_type (google.cloud.dlp_v2.types.InfoType): The type of finding this stat is for. @@ -1358,13 +1248,11 @@ class InfoTypeStats(proto.Message): """ info_type = proto.Field(proto.MESSAGE, number=1, message=storage.InfoType,) - - count = proto.Field(proto.INT64, number=2) + count = proto.Field(proto.INT64, number=2,) class InspectDataSourceDetails(proto.Message): r"""The results of an inspect DataSource job. - Attributes: requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): The configuration used for this job. @@ -1374,7 +1262,6 @@ class InspectDataSourceDetails(proto.Message): class RequestedOptions(proto.Message): r"""Snapshot of the inspection configuration. - Attributes: snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): If run with an InspectTemplate, a snapshot of @@ -1386,7 +1273,6 @@ class RequestedOptions(proto.Message): snapshot_inspect_template = proto.Field( proto.MESSAGE, number=1, message="InspectTemplate", ) - job_config = proto.Field(proto.MESSAGE, number=3, message="InspectJobConfig",) class Result(proto.Message): @@ -1410,26 +1296,21 @@ class Result(proto.Message): launch-stages. """ - processed_bytes = proto.Field(proto.INT64, number=1) - - total_estimated_bytes = proto.Field(proto.INT64, number=2) - + processed_bytes = proto.Field(proto.INT64, number=1,) + total_estimated_bytes = proto.Field(proto.INT64, number=2,) info_type_stats = proto.RepeatedField( proto.MESSAGE, number=3, message="InfoTypeStats", ) - hybrid_stats = proto.Field( proto.MESSAGE, number=7, message="HybridInspectStatistics", ) requested_options = proto.Field(proto.MESSAGE, number=2, message=RequestedOptions,) - result = proto.Field(proto.MESSAGE, number=3, message=Result,) class HybridInspectStatistics(proto.Message): r"""Statistics related to processing hybrid inspect requests. 
- Attributes: processed_count (int): The number of hybrid inspection requests @@ -1447,16 +1328,13 @@ class HybridInspectStatistics(proto.Message): enqueued for. """ - processed_count = proto.Field(proto.INT64, number=1) - - aborted_count = proto.Field(proto.INT64, number=2) - - pending_count = proto.Field(proto.INT64, number=3) + processed_count = proto.Field(proto.INT64, number=1,) + aborted_count = proto.Field(proto.INT64, number=2,) + pending_count = proto.Field(proto.INT64, number=3,) class InfoTypeDescription(proto.Message): r"""InfoType description. - Attributes: name (str): Internal name of the infoType. @@ -1470,20 +1348,16 @@ class InfoTypeDescription(proto.Message): language is provided in the request. """ - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) supported_by = proto.RepeatedField( proto.ENUM, number=3, enum="InfoTypeSupportedBy", ) - - description = proto.Field(proto.STRING, number=4) + description = proto.Field(proto.STRING, number=4,) class ListInfoTypesRequest(proto.Message): r"""Request for the list of infoTypes. - Attributes: parent (str): The parent resource name. @@ -1505,18 +1379,14 @@ class ListInfoTypesRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=4) - - language_code = proto.Field(proto.STRING, number=1) - - filter = proto.Field(proto.STRING, number=2) - - location_id = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=4,) + language_code = proto.Field(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=2,) + location_id = proto.Field(proto.STRING, number=3,) class ListInfoTypesResponse(proto.Message): r"""Response to the ListInfoTypes request. - Attributes: info_types (Sequence[google.cloud.dlp_v2.types.InfoTypeDescription]): Set of sensitive infoTypes. @@ -1543,15 +1413,12 @@ class RiskAnalysisJobConfig(proto.Message): """ privacy_metric = proto.Field(proto.MESSAGE, number=1, message="PrivacyMetric",) - source_table = proto.Field(proto.MESSAGE, number=2, message=storage.BigQueryTable,) - actions = proto.RepeatedField(proto.MESSAGE, number=3, message="Action",) class QuasiId(proto.Message): r"""A column with a semantic tag attached. - Attributes: field (google.cloud.dlp_v2.types.FieldId): Required. Identifies the column. 
@@ -1574,14 +1441,13 @@ class QuasiId(proto.Message): """ field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) - info_type = proto.Field( proto.MESSAGE, number=2, oneof="tag", message=storage.InfoType, ) - - custom_tag = proto.Field(proto.STRING, number=3, oneof="tag") - - inferred = proto.Field(proto.MESSAGE, number=4, oneof="tag", message=empty.Empty,) + custom_tag = proto.Field(proto.STRING, number=3, oneof="tag",) + inferred = proto.Field( + proto.MESSAGE, number=4, oneof="tag", message=empty_pb2.Empty, + ) class StatisticalTable(proto.Message): @@ -1620,21 +1486,17 @@ class QuasiIdentifierField(proto.Message): """ field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) - - custom_tag = proto.Field(proto.STRING, number=2) + custom_tag = proto.Field(proto.STRING, number=2,) table = proto.Field(proto.MESSAGE, number=3, message=storage.BigQueryTable,) - quasi_ids = proto.RepeatedField( proto.MESSAGE, number=1, message=QuasiIdentifierField, ) - relative_frequency = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) class PrivacyMetric(proto.Message): r"""Privacy metric to compute for reidentification risk analysis. - Attributes: numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): Numerical stats @@ -1708,7 +1570,6 @@ class KAnonymityConfig(proto.Message): quasi_ids = proto.RepeatedField( proto.MESSAGE, number=1, message=storage.FieldId, ) - entity_id = proto.Field(proto.MESSAGE, number=2, message=storage.EntityId,) class LDiversityConfig(proto.Message): @@ -1729,7 +1590,6 @@ class LDiversityConfig(proto.Message): quasi_ids = proto.RepeatedField( proto.MESSAGE, number=1, message=storage.FieldId, ) - sensitive_attribute = proto.Field( proto.MESSAGE, number=2, message=storage.FieldId, ) @@ -1761,7 +1621,6 @@ class KMapEstimationConfig(proto.Message): class TaggedField(proto.Message): r"""A column with a semantic tag attached. - Attributes: field (google.cloud.dlp_v2.types.FieldId): Required. Identifies the column. 
@@ -1784,15 +1643,12 @@ class TaggedField(proto.Message): """ field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) - info_type = proto.Field( proto.MESSAGE, number=2, oneof="tag", message=storage.InfoType, ) - - custom_tag = proto.Field(proto.STRING, number=3, oneof="tag") - + custom_tag = proto.Field(proto.STRING, number=3, oneof="tag",) inferred = proto.Field( - proto.MESSAGE, number=4, oneof="tag", message=empty.Empty, + proto.MESSAGE, number=4, oneof="tag", message=empty_pb2.Empty, ) class AuxiliaryTable(proto.Message): @@ -1828,17 +1684,14 @@ class QuasiIdField(proto.Message): """ field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) - - custom_tag = proto.Field(proto.STRING, number=2) + custom_tag = proto.Field(proto.STRING, number=2,) table = proto.Field(proto.MESSAGE, number=3, message=storage.BigQueryTable,) - quasi_ids = proto.RepeatedField( proto.MESSAGE, number=1, message="PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField", ) - relative_frequency = proto.Field( proto.MESSAGE, number=2, message=storage.FieldId, ) @@ -1848,9 +1701,7 @@ class QuasiIdField(proto.Message): number=1, message="PrivacyMetric.KMapEstimationConfig.TaggedField", ) - - region_code = proto.Field(proto.STRING, number=2) - + region_code = proto.Field(proto.STRING, number=2,) auxiliary_tables = proto.RepeatedField( proto.MESSAGE, number=3, @@ -1879,9 +1730,7 @@ class DeltaPresenceEstimationConfig(proto.Message): """ quasi_ids = proto.RepeatedField(proto.MESSAGE, number=1, message="QuasiId",) - - region_code = proto.Field(proto.STRING, number=2) - + region_code = proto.Field(proto.STRING, number=2,) auxiliary_tables = proto.RepeatedField( proto.MESSAGE, number=3, message="StatisticalTable", ) @@ -1889,23 +1738,18 @@ class DeltaPresenceEstimationConfig(proto.Message): numerical_stats_config = proto.Field( proto.MESSAGE, number=1, oneof="type", message=NumericalStatsConfig, ) - categorical_stats_config = proto.Field( proto.MESSAGE, number=2, oneof="type", message=CategoricalStatsConfig, ) - k_anonymity_config = proto.Field( proto.MESSAGE, number=3, oneof="type", message=KAnonymityConfig, ) - l_diversity_config = proto.Field( proto.MESSAGE, number=4, oneof="type", message=LDiversityConfig, ) - k_map_estimation_config = proto.Field( proto.MESSAGE, number=5, oneof="type", message=KMapEstimationConfig, ) - delta_presence_estimation_config = proto.Field( proto.MESSAGE, number=6, oneof="type", message=DeltaPresenceEstimationConfig, ) @@ -1913,7 +1757,6 @@ class DeltaPresenceEstimationConfig(proto.Message): class AnalyzeDataSourceRiskDetails(proto.Message): r"""Result of a risk analysis operation request. - Attributes: requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): Privacy metric to compute. @@ -1937,7 +1780,6 @@ class AnalyzeDataSourceRiskDetails(proto.Message): class NumericalStatsResult(proto.Message): r"""Result of the numerical stats computation. - Attributes: min_value (google.cloud.dlp_v2.types.Value): Minimum value appearing in the column. @@ -1949,14 +1791,11 @@ class NumericalStatsResult(proto.Message): """ min_value = proto.Field(proto.MESSAGE, number=1, message="Value",) - max_value = proto.Field(proto.MESSAGE, number=2, message="Value",) - quantile_values = proto.RepeatedField(proto.MESSAGE, number=4, message="Value",) class CategoricalStatsResult(proto.Message): r"""Result of the categorical stats computation. 
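Illustrative sketch, not part of the patch: ``PrivacyMetric`` and its nested configs shown above are plain proto-plus messages, so a k-anonymity risk job configuration can be assembled with keyword arguments. This assumes the storage types (``FieldId``, ``BigQueryTable``) are exported at the package root as in the released library; project, dataset, table, and column names are placeholders.

from google.cloud import dlp_v2

risk_job = dlp_v2.RiskAnalysisJobConfig(
    privacy_metric=dlp_v2.PrivacyMetric(
        k_anonymity_config=dlp_v2.PrivacyMetric.KAnonymityConfig(
            quasi_ids=[
                dlp_v2.FieldId(name="zip_code"),    # placeholder column
                dlp_v2.FieldId(name="birth_year"),  # placeholder column
            ],
        ),
    ),
    source_table=dlp_v2.BigQueryTable(
        project_id="my-project",  # placeholder
        dataset_id="my_dataset",  # placeholder
        table_id="my_table",      # placeholder
    ),
)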
- Attributes: value_frequency_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): Histogram of value frequencies in the column. @@ -1964,7 +1803,6 @@ class CategoricalStatsResult(proto.Message): class CategoricalStatsHistogramBucket(proto.Message): r"""Histogram of value frequencies in the column. - Attributes: value_frequency_lower_bound (int): Lower bound on the value frequency of the @@ -1983,17 +1821,13 @@ class CategoricalStatsHistogramBucket(proto.Message): bucket. """ - value_frequency_lower_bound = proto.Field(proto.INT64, number=1) - - value_frequency_upper_bound = proto.Field(proto.INT64, number=2) - - bucket_size = proto.Field(proto.INT64, number=3) - + value_frequency_lower_bound = proto.Field(proto.INT64, number=1,) + value_frequency_upper_bound = proto.Field(proto.INT64, number=2,) + bucket_size = proto.Field(proto.INT64, number=3,) bucket_values = proto.RepeatedField( proto.MESSAGE, number=4, message="ValueFrequency", ) - - bucket_value_count = proto.Field(proto.INT64, number=5) + bucket_value_count = proto.Field(proto.INT64, number=5,) value_frequency_histogram_buckets = proto.RepeatedField( proto.MESSAGE, @@ -2003,7 +1837,6 @@ class CategoricalStatsHistogramBucket(proto.Message): class KAnonymityResult(proto.Message): r"""Result of the k-anonymity computation. - Attributes: equivalence_class_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): Histogram of k-anonymity equivalence classes. @@ -2027,12 +1860,10 @@ class KAnonymityEquivalenceClass(proto.Message): quasi_ids_values = proto.RepeatedField( proto.MESSAGE, number=1, message="Value", ) - - equivalence_class_size = proto.Field(proto.INT64, number=2) + equivalence_class_size = proto.Field(proto.INT64, number=2,) class KAnonymityHistogramBucket(proto.Message): r"""Histogram of k-anonymity equivalence classes. - Attributes: equivalence_class_size_lower_bound (int): Lower bound on the size of the equivalence @@ -2052,19 +1883,15 @@ class KAnonymityHistogramBucket(proto.Message): in this bucket. """ - equivalence_class_size_lower_bound = proto.Field(proto.INT64, number=1) - - equivalence_class_size_upper_bound = proto.Field(proto.INT64, number=2) - - bucket_size = proto.Field(proto.INT64, number=3) - + equivalence_class_size_lower_bound = proto.Field(proto.INT64, number=1,) + equivalence_class_size_upper_bound = proto.Field(proto.INT64, number=2,) + bucket_size = proto.Field(proto.INT64, number=3,) bucket_values = proto.RepeatedField( proto.MESSAGE, number=4, message="AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass", ) - - bucket_value_count = proto.Field(proto.INT64, number=5) + bucket_value_count = proto.Field(proto.INT64, number=5,) equivalence_class_histogram_buckets = proto.RepeatedField( proto.MESSAGE, @@ -2074,7 +1901,6 @@ class KAnonymityHistogramBucket(proto.Message): class LDiversityResult(proto.Message): r"""Result of the l-diversity computation. 
- Attributes: sensitive_value_frequency_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): Histogram of l-diversity equivalence class @@ -2103,11 +1929,8 @@ class LDiversityEquivalenceClass(proto.Message): quasi_ids_values = proto.RepeatedField( proto.MESSAGE, number=1, message="Value", ) - - equivalence_class_size = proto.Field(proto.INT64, number=2) - - num_distinct_sensitive_values = proto.Field(proto.INT64, number=3) - + equivalence_class_size = proto.Field(proto.INT64, number=2,) + num_distinct_sensitive_values = proto.Field(proto.INT64, number=3,) top_sensitive_values = proto.RepeatedField( proto.MESSAGE, number=4, message="ValueFrequency", ) @@ -2137,19 +1960,15 @@ class LDiversityHistogramBucket(proto.Message): in this bucket. """ - sensitive_value_frequency_lower_bound = proto.Field(proto.INT64, number=1) - - sensitive_value_frequency_upper_bound = proto.Field(proto.INT64, number=2) - - bucket_size = proto.Field(proto.INT64, number=3) - + sensitive_value_frequency_lower_bound = proto.Field(proto.INT64, number=1,) + sensitive_value_frequency_upper_bound = proto.Field(proto.INT64, number=2,) + bucket_size = proto.Field(proto.INT64, number=3,) bucket_values = proto.RepeatedField( proto.MESSAGE, number=4, message="AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass", ) - - bucket_value_count = proto.Field(proto.INT64, number=5) + bucket_value_count = proto.Field(proto.INT64, number=5,) sensitive_value_frequency_histogram_buckets = proto.RepeatedField( proto.MESSAGE, @@ -2175,7 +1994,6 @@ class KMapEstimationResult(proto.Message): class KMapEstimationQuasiIdValues(proto.Message): r"""A tuple of values for the quasi-identifier columns. - Attributes: quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): The quasi-identifier values. @@ -2187,8 +2005,7 @@ class KMapEstimationQuasiIdValues(proto.Message): quasi_ids_values = proto.RepeatedField( proto.MESSAGE, number=1, message="Value", ) - - estimated_anonymity = proto.Field(proto.INT64, number=2) + estimated_anonymity = proto.Field(proto.INT64, number=2,) class KMapEstimationHistogramBucket(proto.Message): r"""A KMapEstimationHistogramBucket message with the following values: @@ -2215,19 +2032,15 @@ class KMapEstimationHistogramBucket(proto.Message): tuple values in this bucket. """ - min_anonymity = proto.Field(proto.INT64, number=1) - - max_anonymity = proto.Field(proto.INT64, number=2) - - bucket_size = proto.Field(proto.INT64, number=5) - + min_anonymity = proto.Field(proto.INT64, number=1,) + max_anonymity = proto.Field(proto.INT64, number=2,) + bucket_size = proto.Field(proto.INT64, number=5,) bucket_values = proto.RepeatedField( proto.MESSAGE, number=6, message="AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues", ) - - bucket_value_count = proto.Field(proto.INT64, number=7) + bucket_value_count = proto.Field(proto.INT64, number=7,) k_map_estimation_histogram = proto.RepeatedField( proto.MESSAGE, @@ -2254,7 +2067,6 @@ class DeltaPresenceEstimationResult(proto.Message): class DeltaPresenceEstimationQuasiIdValues(proto.Message): r"""A tuple of values for the quasi-identifier columns. - Attributes: quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): The quasi-identifier values. 
@@ -2274,8 +2086,7 @@ class DeltaPresenceEstimationQuasiIdValues(proto.Message): quasi_ids_values = proto.RepeatedField( proto.MESSAGE, number=1, message="Value", ) - - estimated_probability = proto.Field(proto.DOUBLE, number=2) + estimated_probability = proto.Field(proto.DOUBLE, number=2,) class DeltaPresenceEstimationHistogramBucket(proto.Message): r"""A DeltaPresenceEstimationHistogramBucket message with the following @@ -2302,19 +2113,15 @@ class DeltaPresenceEstimationHistogramBucket(proto.Message): tuple values in this bucket. """ - min_probability = proto.Field(proto.DOUBLE, number=1) - - max_probability = proto.Field(proto.DOUBLE, number=2) - - bucket_size = proto.Field(proto.INT64, number=5) - + min_probability = proto.Field(proto.DOUBLE, number=1,) + max_probability = proto.Field(proto.DOUBLE, number=2,) + bucket_size = proto.Field(proto.INT64, number=5,) bucket_values = proto.RepeatedField( proto.MESSAGE, number=6, message="AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues", ) - - bucket_value_count = proto.Field(proto.INT64, number=7) + bucket_value_count = proto.Field(proto.INT64, number=7,) delta_presence_estimation_histogram = proto.RepeatedField( proto.MESSAGE, @@ -2324,7 +2131,6 @@ class DeltaPresenceEstimationHistogramBucket(proto.Message): class RequestedRiskAnalysisOptions(proto.Message): r"""Risk analysis options. - Attributes: job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): The job config for the risk job. @@ -2337,35 +2143,27 @@ class RequestedRiskAnalysisOptions(proto.Message): requested_privacy_metric = proto.Field( proto.MESSAGE, number=1, message="PrivacyMetric", ) - requested_source_table = proto.Field( proto.MESSAGE, number=2, message=storage.BigQueryTable, ) - numerical_stats_result = proto.Field( proto.MESSAGE, number=3, oneof="result", message=NumericalStatsResult, ) - categorical_stats_result = proto.Field( proto.MESSAGE, number=4, oneof="result", message=CategoricalStatsResult, ) - k_anonymity_result = proto.Field( proto.MESSAGE, number=5, oneof="result", message=KAnonymityResult, ) - l_diversity_result = proto.Field( proto.MESSAGE, number=6, oneof="result", message=LDiversityResult, ) - k_map_estimation_result = proto.Field( proto.MESSAGE, number=7, oneof="result", message=KMapEstimationResult, ) - delta_presence_estimation_result = proto.Field( proto.MESSAGE, number=9, oneof="result", message=DeltaPresenceEstimationResult, ) - requested_options = proto.Field( proto.MESSAGE, number=10, message=RequestedRiskAnalysisOptions, ) @@ -2373,7 +2171,6 @@ class RequestedRiskAnalysisOptions(proto.Message): class ValueFrequency(proto.Message): r"""A value of a field, including its frequency. - Attributes: value (google.cloud.dlp_v2.types.Value): A value contained in the field in question. 
@@ -2383,8 +2180,7 @@ class ValueFrequency(proto.Message): """ value = proto.Field(proto.MESSAGE, number=1, message="Value",) - - count = proto.Field(proto.INT64, number=2) + count = proto.Field(proto.INT64, number=2,) class Value(proto.Message): @@ -2414,34 +2210,26 @@ class Value(proto.Message): day of week """ - integer_value = proto.Field(proto.INT64, number=1, oneof="type") - - float_value = proto.Field(proto.DOUBLE, number=2, oneof="type") - - string_value = proto.Field(proto.STRING, number=3, oneof="type") - - boolean_value = proto.Field(proto.BOOL, number=4, oneof="type") - + integer_value = proto.Field(proto.INT64, number=1, oneof="type",) + float_value = proto.Field(proto.DOUBLE, number=2, oneof="type",) + string_value = proto.Field(proto.STRING, number=3, oneof="type",) + boolean_value = proto.Field(proto.BOOL, number=4, oneof="type",) timestamp_value = proto.Field( - proto.MESSAGE, number=5, oneof="type", message=timestamp.Timestamp, + proto.MESSAGE, number=5, oneof="type", message=timestamp_pb2.Timestamp, ) - time_value = proto.Field( - proto.MESSAGE, number=6, oneof="type", message=timeofday.TimeOfDay, + proto.MESSAGE, number=6, oneof="type", message=timeofday_pb2.TimeOfDay, ) - date_value = proto.Field( - proto.MESSAGE, number=7, oneof="type", message=gt_date.Date, + proto.MESSAGE, number=7, oneof="type", message=date_pb2.Date, ) - day_of_week_value = proto.Field( - proto.ENUM, number=8, oneof="type", enum=dayofweek.DayOfWeek, + proto.ENUM, number=8, oneof="type", enum=dayofweek_pb2.DayOfWeek, ) class QuoteInfo(proto.Message): r"""Message for infoType-dependent details parsed from quote. - Attributes: date_time (google.cloud.dlp_v2.types.DateTime): The date time indicated by the quote. @@ -2470,7 +2258,6 @@ class DateTime(proto.Message): class TimeZone(proto.Message): r"""Time zone of the date time object. - Attributes: offset_minutes (int): Set only if the offset can be determined. @@ -2478,20 +2265,16 @@ class TimeZone(proto.Message): "UTC-9", this value is -540. """ - offset_minutes = proto.Field(proto.INT32, number=1) - - date = proto.Field(proto.MESSAGE, number=1, message=gt_date.Date,) - - day_of_week = proto.Field(proto.ENUM, number=2, enum=dayofweek.DayOfWeek,) - - time = proto.Field(proto.MESSAGE, number=3, message=timeofday.TimeOfDay,) + offset_minutes = proto.Field(proto.INT32, number=1,) + date = proto.Field(proto.MESSAGE, number=1, message=date_pb2.Date,) + day_of_week = proto.Field(proto.ENUM, number=2, enum=dayofweek_pb2.DayOfWeek,) + time = proto.Field(proto.MESSAGE, number=3, message=timeofday_pb2.TimeOfDay,) time_zone = proto.Field(proto.MESSAGE, number=4, message=TimeZone,) class DeidentifyConfig(proto.Message): r"""The configuration that controls how the data will change. - Attributes: info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): Treat the dataset as free-form text and apply @@ -2513,14 +2296,12 @@ class DeidentifyConfig(proto.Message): oneof="transformation", message="InfoTypeTransformations", ) - record_transformations = proto.Field( proto.MESSAGE, number=2, oneof="transformation", message="RecordTransformations", ) - transformation_error_handling = proto.Field( proto.MESSAGE, number=3, message="TransformationErrorHandling", ) @@ -2546,19 +2327,18 @@ class TransformationErrorHandling(proto.Message): class ThrowError(proto.Message): r"""Throw an error and fail the request when a transformation error occurs. 
- """ + """ class LeaveUntransformed(proto.Message): r"""Skips the data without modifying it if the requested transformation would cause an error. For example, if a ``DateShift`` transformation were applied an an IP address, this mode would leave the IP address unchanged in the response. - """ + """ throw_error = proto.Field( proto.MESSAGE, number=1, oneof="mode", message=ThrowError, ) - leave_untransformed = proto.Field( proto.MESSAGE, number=2, oneof="mode", message=LeaveUntransformed, ) @@ -2566,7 +2346,6 @@ class LeaveUntransformed(proto.Message): class PrimitiveTransformation(proto.Message): r"""A rule for transforming a value. - Attributes: replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): Replace @@ -2595,52 +2374,42 @@ class PrimitiveTransformation(proto.Message): replace_config = proto.Field( proto.MESSAGE, number=1, oneof="transformation", message="ReplaceValueConfig", ) - redact_config = proto.Field( proto.MESSAGE, number=2, oneof="transformation", message="RedactConfig", ) - character_mask_config = proto.Field( proto.MESSAGE, number=3, oneof="transformation", message="CharacterMaskConfig", ) - crypto_replace_ffx_fpe_config = proto.Field( proto.MESSAGE, number=4, oneof="transformation", message="CryptoReplaceFfxFpeConfig", ) - fixed_size_bucketing_config = proto.Field( proto.MESSAGE, number=5, oneof="transformation", message="FixedSizeBucketingConfig", ) - bucketing_config = proto.Field( proto.MESSAGE, number=6, oneof="transformation", message="BucketingConfig", ) - replace_with_info_type_config = proto.Field( proto.MESSAGE, number=7, oneof="transformation", message="ReplaceWithInfoTypeConfig", ) - time_part_config = proto.Field( proto.MESSAGE, number=8, oneof="transformation", message="TimePartConfig", ) - crypto_hash_config = proto.Field( proto.MESSAGE, number=9, oneof="transformation", message="CryptoHashConfig", ) - date_shift_config = proto.Field( proto.MESSAGE, number=11, oneof="transformation", message="DateShiftConfig", ) - crypto_deterministic_config = proto.Field( proto.MESSAGE, number=12, @@ -2761,17 +2530,14 @@ class CryptoDeterministicConfig(proto.Message): """ crypto_key = proto.Field(proto.MESSAGE, number=1, message="CryptoKey",) - surrogate_info_type = proto.Field( proto.MESSAGE, number=2, message=storage.InfoType, ) - context = proto.Field(proto.MESSAGE, number=3, message=storage.FieldId,) class ReplaceValueConfig(proto.Message): r"""Replace each input value with a given ``Value``. - Attributes: new_value (google.cloud.dlp_v2.types.Value): Value to replace it with. @@ -2781,7 +2547,7 @@ class ReplaceValueConfig(proto.Message): class ReplaceWithInfoTypeConfig(proto.Message): - r"""Replace each matching finding with the name of the info_type.""" + r"""Replace each matching finding with the name of the info_type. """ class RedactConfig(proto.Message): @@ -2789,7 +2555,7 @@ class RedactConfig(proto.Message): ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the output would be 'My phone number is '. - """ + """ class CharsToIgnore(proto.Message): @@ -2815,8 +2581,7 @@ class CommonCharsToIgnore(proto.Enum): PUNCTUATION = 4 WHITESPACE = 5 - characters_to_skip = proto.Field(proto.STRING, number=1, oneof="characters") - + characters_to_skip = proto.Field(proto.STRING, number=1, oneof="characters",) common_characters_to_ignore = proto.Field( proto.ENUM, number=2, oneof="characters", enum=CommonCharsToIgnore, ) @@ -2857,12 +2622,9 @@ class CharacterMaskConfig(proto.Message): ``***-**5-5555``. 
""" - masking_character = proto.Field(proto.STRING, number=1) - - number_to_mask = proto.Field(proto.INT32, number=2) - - reverse_order = proto.Field(proto.BOOL, number=3) - + masking_character = proto.Field(proto.STRING, number=1,) + number_to_mask = proto.Field(proto.INT32, number=2,) + reverse_order = proto.Field(proto.BOOL, number=3,) characters_to_ignore = proto.RepeatedField( proto.MESSAGE, number=4, message="CharsToIgnore", ) @@ -2909,10 +2671,8 @@ class FixedSizeBucketingConfig(proto.Message): """ lower_bound = proto.Field(proto.MESSAGE, number=1, message="Value",) - upper_bound = proto.Field(proto.MESSAGE, number=2, message="Value",) - - bucket_size = proto.Field(proto.DOUBLE, number=3) + bucket_size = proto.Field(proto.DOUBLE, number=3,) class BucketingConfig(proto.Message): @@ -2948,9 +2708,7 @@ class Bucket(proto.Message): """ min_ = proto.Field(proto.MESSAGE, number=1, message="Value",) - max_ = proto.Field(proto.MESSAGE, number=2, message="Value",) - replacement_value = proto.Field(proto.MESSAGE, number=3, message="Value",) buckets = proto.RepeatedField(proto.MESSAGE, number=1, message=Bucket,) @@ -3061,17 +2819,12 @@ class FfxCommonNativeAlphabet(proto.Enum): ALPHA_NUMERIC = 4 crypto_key = proto.Field(proto.MESSAGE, number=1, message="CryptoKey",) - context = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) - common_alphabet = proto.Field( proto.ENUM, number=4, oneof="alphabet", enum=FfxCommonNativeAlphabet, ) - - custom_alphabet = proto.Field(proto.STRING, number=5, oneof="alphabet") - - radix = proto.Field(proto.INT32, number=6, oneof="alphabet") - + custom_alphabet = proto.Field(proto.STRING, number=5, oneof="alphabet",) + radix = proto.Field(proto.INT32, number=6, oneof="alphabet",) surrogate_info_type = proto.Field( proto.MESSAGE, number=8, message=storage.InfoType, ) @@ -3096,11 +2849,9 @@ class CryptoKey(proto.Message): transient = proto.Field( proto.MESSAGE, number=1, oneof="source", message="TransientCryptoKey", ) - unwrapped = proto.Field( proto.MESSAGE, number=2, oneof="source", message="UnwrappedCryptoKey", ) - kms_wrapped = proto.Field( proto.MESSAGE, number=3, oneof="source", message="KmsWrappedCryptoKey", ) @@ -3121,7 +2872,7 @@ class TransientCryptoKey(proto.Message): being generated). """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UnwrappedCryptoKey(proto.Message): @@ -3133,7 +2884,7 @@ class UnwrappedCryptoKey(proto.Message): Required. A 128/192/256 bit key. """ - key = proto.Field(proto.BYTES, number=1) + key = proto.Field(proto.BYTES, number=1,) class KmsWrappedCryptoKey(proto.Message): @@ -3151,9 +2902,8 @@ class KmsWrappedCryptoKey(proto.Message): CryptoKey to use for unwrapping. """ - wrapped_key = proto.Field(proto.BYTES, number=1) - - crypto_key_name = proto.Field(proto.STRING, number=2) + wrapped_key = proto.Field(proto.BYTES, number=1,) + crypto_key_name = proto.Field(proto.STRING, number=2,) class DateShiftConfig(proto.Message): @@ -3186,12 +2936,9 @@ class DateShiftConfig(proto.Message): applied to table items. 
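Illustrative sketch, not part of the patch: the character-masking options documented above plug into a ``PrimitiveTransformation`` as shown below; the masking character, count, and ignored character class are arbitrary choices for the example.

from google.cloud import dlp_v2

# Mask the first five characters of each finding with '#', leaving
# punctuation untouched via characters_to_ignore.
mask = dlp_v2.CharacterMaskConfig(
    masking_character="#",
    number_to_mask=5,
    reverse_order=False,
    characters_to_ignore=[
        dlp_v2.CharsToIgnore(
            common_characters_to_ignore=(
                dlp_v2.CharsToIgnore.CommonCharsToIgnore.PUNCTUATION
            ),
        ),
    ],
)
transformation = dlp_v2.PrimitiveTransformation(character_mask_config=mask)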
""" - upper_bound_days = proto.Field(proto.INT32, number=1) - - lower_bound_days = proto.Field(proto.INT32, number=2) - + upper_bound_days = proto.Field(proto.INT32, number=1,) + lower_bound_days = proto.Field(proto.INT32, number=2,) context = proto.Field(proto.MESSAGE, number=3, message=storage.FieldId,) - crypto_key = proto.Field( proto.MESSAGE, number=4, oneof="method", message="CryptoKey", ) @@ -3228,7 +2975,6 @@ class InfoTypeTransformation(proto.Message): info_types = proto.RepeatedField( proto.MESSAGE, number=1, message=storage.InfoType, ) - primitive_transformation = proto.Field( proto.MESSAGE, number=2, message="PrimitiveTransformation", ) @@ -3240,7 +2986,6 @@ class InfoTypeTransformation(proto.Message): class FieldTransformation(proto.Message): r"""The transformation to apply to the field. - Attributes: fields (Sequence[google.cloud.dlp_v2.types.FieldId]): Required. Input field(s) to apply the @@ -3266,16 +3011,13 @@ class FieldTransformation(proto.Message): """ fields = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.FieldId,) - condition = proto.Field(proto.MESSAGE, number=3, message="RecordCondition",) - primitive_transformation = proto.Field( proto.MESSAGE, number=4, oneof="transformation", message="PrimitiveTransformation", ) - info_type_transformations = proto.Field( proto.MESSAGE, number=5, @@ -3301,7 +3043,6 @@ class RecordTransformations(proto.Message): field_transformations = proto.RepeatedField( proto.MESSAGE, number=1, message="FieldTransformation", ) - record_suppressions = proto.RepeatedField( proto.MESSAGE, number=2, message="RecordSuppression", ) @@ -3364,14 +3105,11 @@ class Condition(proto.Message): """ field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) - operator = proto.Field(proto.ENUM, number=3, enum="RelationalOperator",) - value = proto.Field(proto.MESSAGE, number=4, message="Value",) class Conditions(proto.Message): r"""A collection of conditions. - Attributes: conditions (Sequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): A collection of conditions. @@ -3383,7 +3121,6 @@ class Conditions(proto.Message): class Expressions(proto.Message): r"""An expression, consisting or an operator and conditions. - Attributes: logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): The operator to apply to the result of conditions. Default @@ -3400,7 +3137,6 @@ class LogicalOperator(proto.Enum): logical_operator = proto.Field( proto.ENUM, number=1, enum="RecordCondition.Expressions.LogicalOperator", ) - conditions = proto.Field( proto.MESSAGE, number=3, oneof="type", message="RecordCondition.Conditions", ) @@ -3410,7 +3146,6 @@ class LogicalOperator(proto.Enum): class TransformationOverview(proto.Message): r"""Overview of the modifications that occurred. - Attributes: transformed_bytes (int): Total size in bytes that were transformed in @@ -3419,8 +3154,7 @@ class TransformationOverview(proto.Message): Transformations applied to the dataset. """ - transformed_bytes = proto.Field(proto.INT64, number=2) - + transformed_bytes = proto.Field(proto.INT64, number=2,) transformation_summaries = proto.RepeatedField( proto.MESSAGE, number=3, message="TransformationSummary", ) @@ -3477,36 +3211,27 @@ class SummaryResult(proto.Message): a transformation didn't work as expected. 
""" - count = proto.Field(proto.INT64, number=1) - + count = proto.Field(proto.INT64, number=1,) code = proto.Field( proto.ENUM, number=2, enum="TransformationSummary.TransformationResultCode", ) - - details = proto.Field(proto.STRING, number=3) + details = proto.Field(proto.STRING, number=3,) info_type = proto.Field(proto.MESSAGE, number=1, message=storage.InfoType,) - field = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) - transformation = proto.Field( proto.MESSAGE, number=3, message="PrimitiveTransformation", ) - field_transformations = proto.RepeatedField( proto.MESSAGE, number=5, message="FieldTransformation", ) - record_suppress = proto.Field(proto.MESSAGE, number=6, message="RecordSuppression",) - results = proto.RepeatedField(proto.MESSAGE, number=4, message=SummaryResult,) - - transformed_bytes = proto.Field(proto.INT64, number=7) + transformed_bytes = proto.Field(proto.INT64, number=7,) class Schedule(proto.Message): r"""Schedule for triggeredJobs. - Attributes: recurrence_period_duration (google.protobuf.duration_pb2.Duration): With this option a job is started a regular @@ -3521,14 +3246,14 @@ class Schedule(proto.Message): """ recurrence_period_duration = proto.Field( - proto.MESSAGE, number=1, oneof="option", message=duration.Duration, + proto.MESSAGE, number=1, oneof="option", message=duration_pb2.Duration, ) class Manual(proto.Message): r"""Job trigger option for hybrid jobs. Jobs must be manually created and finished. - """ + """ class InspectTemplate(proto.Message): @@ -3560,16 +3285,11 @@ class InspectTemplate(proto.Message): Configuration of the scanning process. """ - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) inspect_config = proto.Field(proto.MESSAGE, number=6, message="InspectConfig",) @@ -3600,16 +3320,11 @@ class DeidentifyTemplate(proto.Message): template // /////////////// """ - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) deidentify_config = proto.Field( proto.MESSAGE, number=6, message="DeidentifyConfig", ) @@ -3627,10 +3342,9 @@ class Error(proto.Message): The times the error occurred. 
""" - details = proto.Field(proto.MESSAGE, number=1, message=gr_status.Status,) - + details = proto.Field(proto.MESSAGE, number=1, message=status_pb2.Status,) timestamps = proto.RepeatedField( - proto.MESSAGE, number=2, message=timestamp.Timestamp, + proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) @@ -3691,7 +3405,6 @@ class Status(proto.Enum): class Trigger(proto.Message): r"""What event needs to occur for a new job to be started. - Attributes: schedule (google.cloud.dlp_v2.types.Schedule): Create a job on a repeating basis based on @@ -3709,31 +3422,23 @@ class Trigger(proto.Message): schedule = proto.Field( proto.MESSAGE, number=1, oneof="trigger", message="Schedule", ) - manual = proto.Field( proto.MESSAGE, number=2, oneof="trigger", message="Manual", ) - name = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) inspect_job = proto.Field( proto.MESSAGE, number=4, oneof="job", message="InspectJobConfig", ) - triggers = proto.RepeatedField(proto.MESSAGE, number=5, message=Trigger,) - errors = proto.RepeatedField(proto.MESSAGE, number=6, message="Error",) - - create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) - - last_run_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) - + create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp,) + last_run_time = proto.Field( + proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp, + ) status = proto.Field(proto.ENUM, number=10, enum=Status,) @@ -3792,7 +3497,7 @@ class PublishToPubSub(proto.Message): projects/{project}/topics/{topic}. """ - topic = proto.Field(proto.STRING, number=1) + topic = proto.Field(proto.STRING, number=1,) class PublishSummaryToCscc(proto.Message): r"""Publish the result summary of a DlpJob to the Cloud Security @@ -3806,7 +3511,7 @@ class PublishSummaryToCscc(proto.Message): https://cloud.google.com/terms/service-terms Only a single instance of this action can be specified. Compatible with: Inspect - """ + """ class PublishFindingsToCloudDataCatalog(proto.Message): r"""Publish findings of a DlpJob to Cloud Data Catalog. Labels @@ -3820,43 +3525,38 @@ class PublishFindingsToCloudDataCatalog(proto.Message): Only a single instance of this action can be specified and only allowed if all resources being scanned are BigQuery tables. Compatible with: Inspect - """ + """ class JobNotificationEmails(proto.Message): r"""Enable email notification to project owners and editors on jobs's completion/failure. - """ + """ class PublishToStackdriver(proto.Message): r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This will publish a metric to stack driver on each infotype requested and how many findings were found for it. CustomDetectors will be bucketed as 'Custom' under the Stackdriver label 'info_type'. 
- """ + """ save_findings = proto.Field( proto.MESSAGE, number=1, oneof="action", message=SaveFindings, ) - pub_sub = proto.Field( proto.MESSAGE, number=2, oneof="action", message=PublishToPubSub, ) - publish_summary_to_cscc = proto.Field( proto.MESSAGE, number=3, oneof="action", message=PublishSummaryToCscc, ) - publish_findings_to_cloud_data_catalog = proto.Field( proto.MESSAGE, number=5, oneof="action", message=PublishFindingsToCloudDataCatalog, ) - job_notification_emails = proto.Field( proto.MESSAGE, number=8, oneof="action", message=JobNotificationEmails, ) - publish_to_stackdriver = proto.Field( proto.MESSAGE, number=9, oneof="action", message=PublishToStackdriver, ) @@ -3864,7 +3564,6 @@ class PublishToStackdriver(proto.Message): class CreateInspectTemplateRequest(proto.Message): r"""Request message for CreateInspectTemplate. - Attributes: parent (str): Required. Parent resource name. @@ -3902,18 +3601,14 @@ class CreateInspectTemplateRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) inspect_template = proto.Field(proto.MESSAGE, number=2, message="InspectTemplate",) - - template_id = proto.Field(proto.STRING, number=3) - - location_id = proto.Field(proto.STRING, number=4) + template_id = proto.Field(proto.STRING, number=3,) + location_id = proto.Field(proto.STRING, number=4,) class UpdateInspectTemplateRequest(proto.Message): r"""Request message for UpdateInspectTemplate. - Attributes: name (str): Required. Resource name of organization and inspectTemplate @@ -3926,16 +3621,15 @@ class UpdateInspectTemplateRequest(proto.Message): Mask to control which fields get updated. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) inspect_template = proto.Field(proto.MESSAGE, number=2, message="InspectTemplate",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class GetInspectTemplateRequest(proto.Message): r"""Request message for GetInspectTemplate. - Attributes: name (str): Required. Resource name of the organization and @@ -3944,12 +3638,11 @@ class GetInspectTemplateRequest(proto.Message): projects/project-id/inspectTemplates/432452342. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListInspectTemplatesRequest(proto.Message): r"""Request message for ListInspectTemplates. - Attributes: parent (str): Required. Parent resource name. @@ -4001,20 +3694,15 @@ class ListInspectTemplatesRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) - - order_by = proto.Field(proto.STRING, number=4) - - location_id = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) + order_by = proto.Field(proto.STRING, number=4,) + location_id = proto.Field(proto.STRING, number=5,) class ListInspectTemplatesResponse(proto.Message): r"""Response message for ListInspectTemplates. 
- Attributes: inspect_templates (Sequence[google.cloud.dlp_v2.types.InspectTemplate]): List of inspectTemplates, up to page_size in @@ -4032,13 +3720,11 @@ def raw_page(self): inspect_templates = proto.RepeatedField( proto.MESSAGE, number=1, message="InspectTemplate", ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteInspectTemplateRequest(proto.Message): r"""Request message for DeleteInspectTemplate. - Attributes: name (str): Required. Resource name of the organization and @@ -4047,12 +3733,11 @@ class DeleteInspectTemplateRequest(proto.Message): projects/project-id/inspectTemplates/432452342. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateJobTriggerRequest(proto.Message): r"""Request message for CreateJobTrigger. - Attributes: parent (str): Required. Parent resource name. @@ -4085,30 +3770,25 @@ class CreateJobTriggerRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) job_trigger = proto.Field(proto.MESSAGE, number=2, message="JobTrigger",) - - trigger_id = proto.Field(proto.STRING, number=3) - - location_id = proto.Field(proto.STRING, number=4) + trigger_id = proto.Field(proto.STRING, number=3,) + location_id = proto.Field(proto.STRING, number=4,) class ActivateJobTriggerRequest(proto.Message): r"""Request message for ActivateJobTrigger. - Attributes: name (str): Required. Resource name of the trigger to activate, for example ``projects/dlp-test-project/jobTriggers/53234423``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateJobTriggerRequest(proto.Message): r"""Request message for UpdateJobTrigger. - Attributes: name (str): Required. Resource name of the project and the triggeredJob, @@ -4120,16 +3800,15 @@ class UpdateJobTriggerRequest(proto.Message): Mask to control which fields get updated. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) job_trigger = proto.Field(proto.MESSAGE, number=2, message="JobTrigger",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class GetJobTriggerRequest(proto.Message): r"""Request message for GetJobTrigger. - Attributes: name (str): Required. Resource name of the project and the triggeredJob, @@ -4137,7 +3816,7 @@ class GetJobTriggerRequest(proto.Message): ``projects/dlp-test-project/jobTriggers/53234423``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateDlpJobRequest(proto.Message): @@ -4179,24 +3858,19 @@ class CreateDlpJobRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) inspect_job = proto.Field( proto.MESSAGE, number=2, oneof="job", message="InspectJobConfig", ) - risk_job = proto.Field( proto.MESSAGE, number=3, oneof="job", message="RiskAnalysisJobConfig", ) - - job_id = proto.Field(proto.STRING, number=4) - - location_id = proto.Field(proto.STRING, number=5) + job_id = proto.Field(proto.STRING, number=4,) + location_id = proto.Field(proto.STRING, number=5,) class ListJobTriggersRequest(proto.Message): r"""Request message for ListJobTriggers. - Attributes: parent (str): Required. 
Parent resource name. @@ -4284,22 +3958,16 @@ class ListJobTriggersRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) - - order_by = proto.Field(proto.STRING, number=4) - - filter = proto.Field(proto.STRING, number=5) - - location_id = proto.Field(proto.STRING, number=7) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) + order_by = proto.Field(proto.STRING, number=4,) + filter = proto.Field(proto.STRING, number=5,) + location_id = proto.Field(proto.STRING, number=7,) class ListJobTriggersResponse(proto.Message): r"""Response message for ListJobTriggers. - Attributes: job_triggers (Sequence[google.cloud.dlp_v2.types.JobTrigger]): List of triggeredJobs, up to page_size in @@ -4315,13 +3983,11 @@ def raw_page(self): return self job_triggers = proto.RepeatedField(proto.MESSAGE, number=1, message="JobTrigger",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteJobTriggerRequest(proto.Message): r"""Request message for DeleteJobTrigger. - Attributes: name (str): Required. Resource name of the project and the triggeredJob, @@ -4329,12 +3995,11 @@ class DeleteJobTriggerRequest(proto.Message): ``projects/dlp-test-project/jobTriggers/53234423``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class InspectJobConfig(proto.Message): r"""Controls what and how to inspect for findings. - Attributes: storage_config (google.cloud.dlp_v2.types.StorageConfig): The data to scan. @@ -4352,17 +4017,13 @@ class InspectJobConfig(proto.Message): storage_config = proto.Field( proto.MESSAGE, number=1, message=storage.StorageConfig, ) - inspect_config = proto.Field(proto.MESSAGE, number=2, message="InspectConfig",) - - inspect_template_name = proto.Field(proto.STRING, number=3) - + inspect_template_name = proto.Field(proto.STRING, number=3,) actions = proto.RepeatedField(proto.MESSAGE, number=4, message="Action",) class DlpJob(proto.Message): r"""Combines all of the information about a DLP job. - Attributes: name (str): The server-assigned name. 
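# A minimal sketch of the InspectJobConfig message above: start an inspection
# job over a BigQuery table and email the project owners when it completes.
# Project, dataset, and table IDs are placeholder assumptions.
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
job = client.create_dlp_job(
    request={
        "parent": "projects/my-project/locations/global",  # placeholder project
        "inspect_job": {
            "storage_config": {
                "big_query_options": {
                    "table_reference": {
                        "project_id": "my-project",
                        "dataset_id": "my_dataset",
                        "table_id": "events",
                    }
                }
            },
            "inspect_config": {"info_types": [{"name": "PHONE_NUMBER"}]},
            "actions": [{"job_notification_emails": {}}],
        },
    }
)
print(job.name, job.state)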
@@ -4398,48 +4059,37 @@ class JobState(proto.Enum): FAILED = 5 ACTIVE = 6 - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) type_ = proto.Field(proto.ENUM, number=2, enum="DlpJobType",) - state = proto.Field(proto.ENUM, number=3, enum=JobState,) - risk_details = proto.Field( proto.MESSAGE, number=4, oneof="details", message="AnalyzeDataSourceRiskDetails", ) - inspect_details = proto.Field( proto.MESSAGE, number=5, oneof="details", message="InspectDataSourceDetails", ) - - create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) - - start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) - - job_trigger_name = proto.Field(proto.STRING, number=10) - + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) + start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp,) + job_trigger_name = proto.Field(proto.STRING, number=10,) errors = proto.RepeatedField(proto.MESSAGE, number=11, message="Error",) class GetDlpJobRequest(proto.Message): r"""The request message for [DlpJobs.GetDlpJob][]. - Attributes: name (str): Required. The name of the DlpJob resource. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListDlpJobsRequest(proto.Message): r"""The request message for listing DLP jobs. - Attributes: parent (str): Required. Parent resource name. @@ -4525,24 +4175,17 @@ class ListDlpJobsRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=4) - - filter = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=4,) + filter = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) type_ = proto.Field(proto.ENUM, number=5, enum="DlpJobType",) - - order_by = proto.Field(proto.STRING, number=6) - - location_id = proto.Field(proto.STRING, number=7) + order_by = proto.Field(proto.STRING, number=6,) + location_id = proto.Field(proto.STRING, number=7,) class ListDlpJobsResponse(proto.Message): r"""The response message for listing DLP jobs. - Attributes: jobs (Sequence[google.cloud.dlp_v2.types.DlpJob]): A list of DlpJobs that matches the specified @@ -4556,49 +4199,44 @@ def raw_page(self): return self jobs = proto.RepeatedField(proto.MESSAGE, number=1, message="DlpJob",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CancelDlpJobRequest(proto.Message): r"""The request message for canceling a DLP job. - Attributes: name (str): Required. The name of the DlpJob resource to be cancelled. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class FinishDlpJobRequest(proto.Message): r"""The request message for finishing a DLP hybrid job. - Attributes: name (str): Required. The name of the DlpJob resource to be cancelled. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteDlpJobRequest(proto.Message): r"""The request message for deleting a DLP job. - Attributes: name (str): Required. 
The name of the DlpJob resource to be deleted. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateDeidentifyTemplateRequest(proto.Message): r"""Request message for CreateDeidentifyTemplate. - Attributes: parent (str): Required. Parent resource name. @@ -4636,20 +4274,16 @@ class CreateDeidentifyTemplateRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) deidentify_template = proto.Field( proto.MESSAGE, number=2, message="DeidentifyTemplate", ) - - template_id = proto.Field(proto.STRING, number=3) - - location_id = proto.Field(proto.STRING, number=4) + template_id = proto.Field(proto.STRING, number=3,) + location_id = proto.Field(proto.STRING, number=4,) class UpdateDeidentifyTemplateRequest(proto.Message): r"""Request message for UpdateDeidentifyTemplate. - Attributes: name (str): Required. Resource name of organization and deidentify @@ -4662,18 +4296,17 @@ class UpdateDeidentifyTemplateRequest(proto.Message): Mask to control which fields get updated. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) deidentify_template = proto.Field( proto.MESSAGE, number=2, message="DeidentifyTemplate", ) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class GetDeidentifyTemplateRequest(proto.Message): r"""Request message for GetDeidentifyTemplate. - Attributes: name (str): Required. Resource name of the organization and deidentify @@ -4682,12 +4315,11 @@ class GetDeidentifyTemplateRequest(proto.Message): projects/project-id/deidentifyTemplates/432452342. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListDeidentifyTemplatesRequest(proto.Message): r"""Request message for ListDeidentifyTemplates. - Attributes: parent (str): Required. Parent resource name. @@ -4739,20 +4371,15 @@ class ListDeidentifyTemplatesRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) - - order_by = proto.Field(proto.STRING, number=4) - - location_id = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) + order_by = proto.Field(proto.STRING, number=4,) + location_id = proto.Field(proto.STRING, number=5,) class ListDeidentifyTemplatesResponse(proto.Message): r"""Response message for ListDeidentifyTemplates. - Attributes: deidentify_templates (Sequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): List of deidentify templates, up to page_size in @@ -4770,13 +4397,11 @@ def raw_page(self): deidentify_templates = proto.RepeatedField( proto.MESSAGE, number=1, message="DeidentifyTemplate", ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteDeidentifyTemplateRequest(proto.Message): r"""Request message for DeleteDeidentifyTemplate. - Attributes: name (str): Required. Resource name of the organization and deidentify @@ -4785,7 +4410,7 @@ class DeleteDeidentifyTemplateRequest(proto.Message): projects/project-id/deidentifyTemplates/432452342. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class LargeCustomDictionaryConfig(proto.Message): @@ -4815,11 +4440,9 @@ class LargeCustomDictionaryConfig(proto.Message): output_path = proto.Field( proto.MESSAGE, number=1, message=storage.CloudStoragePath, ) - cloud_storage_file_set = proto.Field( proto.MESSAGE, number=2, oneof="source", message=storage.CloudStorageFileSet, ) - big_query_field = proto.Field( proto.MESSAGE, number=3, oneof="source", message=storage.BigQueryField, ) @@ -4827,14 +4450,13 @@ class LargeCustomDictionaryConfig(proto.Message): class LargeCustomDictionaryStats(proto.Message): r"""Summary statistics of a custom dictionary. - Attributes: approx_num_phrases (int): Approximate number of distinct phrases in the dictionary. """ - approx_num_phrases = proto.Field(proto.INT64, number=1) + approx_num_phrases = proto.Field(proto.INT64, number=1,) class StoredInfoTypeConfig(proto.Message): @@ -4859,21 +4481,17 @@ class StoredInfoTypeConfig(proto.Message): StoredInfoType. """ - display_name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - + display_name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) large_custom_dictionary = proto.Field( proto.MESSAGE, number=3, oneof="type", message="LargeCustomDictionaryConfig", ) - dictionary = proto.Field( proto.MESSAGE, number=4, oneof="type", message=storage.CustomInfoType.Dictionary, ) - regex = proto.Field( proto.MESSAGE, number=5, oneof="type", message=storage.CustomInfoType.Regex, ) @@ -4881,7 +4499,6 @@ class StoredInfoTypeConfig(proto.Message): class StoredInfoTypeStats(proto.Message): r"""Statistics for a StoredInfoType. - Attributes: large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): StoredInfoType where findings are defined by @@ -4930,13 +4547,9 @@ class StoredInfoTypeVersion(proto.Message): """ config = proto.Field(proto.MESSAGE, number=1, message="StoredInfoTypeConfig",) - - create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) state = proto.Field(proto.ENUM, number=3, enum="StoredInfoTypeState",) - errors = proto.RepeatedField(proto.MESSAGE, number=4, message="Error",) - stats = proto.Field(proto.MESSAGE, number=5, message="StoredInfoTypeStats",) @@ -4954,12 +4567,10 @@ class StoredInfoType(proto.Message): Empty if no versions are pending. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) current_version = proto.Field( proto.MESSAGE, number=2, message="StoredInfoTypeVersion", ) - pending_versions = proto.RepeatedField( proto.MESSAGE, number=3, message="StoredInfoTypeVersion", ) @@ -4967,7 +4578,6 @@ class StoredInfoType(proto.Message): class CreateStoredInfoTypeRequest(proto.Message): r"""Request message for CreateStoredInfoType. - Attributes: parent (str): Required. Parent resource name. @@ -5006,18 +4616,14 @@ class CreateStoredInfoTypeRequest(proto.Message): Deprecated. This field has no effect. 
""" - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) config = proto.Field(proto.MESSAGE, number=2, message="StoredInfoTypeConfig",) - - stored_info_type_id = proto.Field(proto.STRING, number=3) - - location_id = proto.Field(proto.STRING, number=4) + stored_info_type_id = proto.Field(proto.STRING, number=3,) + location_id = proto.Field(proto.STRING, number=4,) class UpdateStoredInfoTypeRequest(proto.Message): r"""Request message for UpdateStoredInfoType. - Attributes: name (str): Required. Resource name of organization and storedInfoType @@ -5033,16 +4639,15 @@ class UpdateStoredInfoTypeRequest(proto.Message): Mask to control which fields get updated. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) config = proto.Field(proto.MESSAGE, number=2, message="StoredInfoTypeConfig",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class GetStoredInfoTypeRequest(proto.Message): r"""Request message for GetStoredInfoType. - Attributes: name (str): Required. Resource name of the organization and @@ -5051,12 +4656,11 @@ class GetStoredInfoTypeRequest(proto.Message): projects/project-id/storedInfoTypes/432452342. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListStoredInfoTypesRequest(proto.Message): r"""Request message for ListStoredInfoTypes. - Attributes: parent (str): Required. Parent resource name. @@ -5108,20 +4712,15 @@ class ListStoredInfoTypesRequest(proto.Message): Deprecated. This field has no effect. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) - - order_by = proto.Field(proto.STRING, number=4) - - location_id = proto.Field(proto.STRING, number=5) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) + order_by = proto.Field(proto.STRING, number=4,) + location_id = proto.Field(proto.STRING, number=5,) class ListStoredInfoTypesResponse(proto.Message): r"""Response message for ListStoredInfoTypes. - Attributes: stored_info_types (Sequence[google.cloud.dlp_v2.types.StoredInfoType]): List of storedInfoTypes, up to page_size in @@ -5139,13 +4738,11 @@ def raw_page(self): stored_info_types = proto.RepeatedField( proto.MESSAGE, number=1, message="StoredInfoType", ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteStoredInfoTypeRequest(proto.Message): r"""Request message for DeleteStoredInfoType. - Attributes: name (str): Required. Resource name of the organization and @@ -5154,7 +4751,7 @@ class DeleteStoredInfoTypeRequest(proto.Message): projects/project-id/storedInfoTypes/432452342. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class HybridInspectJobTriggerRequest(proto.Message): @@ -5170,8 +4767,7 @@ class HybridInspectJobTriggerRequest(proto.Message): The item to inspect. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) hybrid_item = proto.Field(proto.MESSAGE, number=3, message="HybridContentItem",) @@ -5188,8 +4784,7 @@ class HybridInspectDlpJobRequest(proto.Message): The item to inspect. 
""" - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) hybrid_item = proto.Field(proto.MESSAGE, number=3, message="HybridContentItem",) @@ -5206,7 +4801,6 @@ class HybridContentItem(proto.Message): """ item = proto.Field(proto.MESSAGE, number=1, message="ContentItem",) - finding_details = proto.Field( proto.MESSAGE, number=2, message="HybridFindingDetails", ) @@ -5214,7 +4808,6 @@ class HybridContentItem(proto.Message): class HybridFindingDetails(proto.Message): r"""Populate to associate additional data with each finding. - Attributes: container_details (google.cloud.dlp_v2.types.Container): Details about the container where the content @@ -5262,18 +4855,14 @@ class HybridFindingDetails(proto.Message): """ container_details = proto.Field(proto.MESSAGE, number=1, message="Container",) - - file_offset = proto.Field(proto.INT64, number=2) - - row_offset = proto.Field(proto.INT64, number=3) - + file_offset = proto.Field(proto.INT64, number=2,) + row_offset = proto.Field(proto.INT64, number=3,) table_options = proto.Field(proto.MESSAGE, number=4, message=storage.TableOptions,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=5) + labels = proto.MapField(proto.STRING, proto.STRING, number=5,) class HybridInspectResponse(proto.Message): - r"""Quota exceeded errors will be thrown once quota has been met.""" + r"""Quota exceeded errors will be thrown once quota has been met. """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dlp_v2/types/storage.py b/google/cloud/dlp_v2/types/storage.py index e76efd06..9bd2fd62 100644 --- a/google/cloud/dlp_v2/types/storage.py +++ b/google/cloud/dlp_v2/types/storage.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -82,7 +79,6 @@ class FileType(proto.Enum): class InfoType(proto.Message): r"""Type of information detected by the API. - Attributes: name (str): Name of the information type. Either a name of your choosing @@ -93,12 +89,11 @@ class InfoType(proto.Message): the pattern ``[A-Za-z0-9$-_]{1,64}``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class StoredType(proto.Message): r"""A reference to a StoredInfoType to use with scanning. - Attributes: name (str): Resource name of the requested ``StoredInfoType``, for @@ -111,9 +106,8 @@ class StoredType(proto.Message): Output-only field, populated by the system. 
""" - name = proto.Field(proto.STRING, number=1) - - create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + name = proto.Field(proto.STRING, number=1,) + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) class CustomInfoType(proto.Message): @@ -209,7 +203,7 @@ class WordList(proto.Message): [required] """ - words = proto.RepeatedField(proto.STRING, number=1) + words = proto.RepeatedField(proto.STRING, number=1,) word_list = proto.Field( proto.MESSAGE, @@ -217,14 +211,12 @@ class WordList(proto.Message): oneof="source", message="CustomInfoType.Dictionary.WordList", ) - cloud_storage_path = proto.Field( proto.MESSAGE, number=3, oneof="source", message="CloudStoragePath", ) class Regex(proto.Message): r"""Message defining a custom regular expression. - Attributes: pattern (str): Pattern defining the regular expression. Its @@ -238,9 +230,8 @@ class Regex(proto.Message): is returned. No more than 3 may be included. """ - pattern = proto.Field(proto.STRING, number=1) - - group_indexes = proto.RepeatedField(proto.INT32, number=2) + pattern = proto.Field(proto.STRING, number=1,) + group_indexes = proto.RepeatedField(proto.INT32, number=2,) class SurrogateType(proto.Message): r"""Message for detecting output from deidentification transformations @@ -251,7 +242,7 @@ class SurrogateType(proto.Message): should be used in conjunction with a field on the transformation such as ``surrogate_info_type``. This CustomInfoType does not support the use of ``detection_rules``. - """ + """ class DetectionRule(proto.Message): r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a @@ -277,9 +268,8 @@ class Proximity(proto.Message): consider. """ - window_before = proto.Field(proto.INT32, number=1) - - window_after = proto.Field(proto.INT32, number=2) + window_before = proto.Field(proto.INT32, number=1,) + window_after = proto.Field(proto.INT32, number=2,) class LikelihoodAdjustment(proto.Message): r"""Message for specifying an adjustment to the likelihood of a @@ -304,8 +294,9 @@ class LikelihoodAdjustment(proto.Message): fixed_likelihood = proto.Field( proto.ENUM, number=1, oneof="adjustment", enum="Likelihood", ) - - relative_likelihood = proto.Field(proto.INT32, number=2, oneof="adjustment") + relative_likelihood = proto.Field( + proto.INT32, number=2, oneof="adjustment", + ) class HotwordRule(proto.Message): r"""The rule that adjusts the likelihood of findings within a @@ -336,13 +327,11 @@ class HotwordRule(proto.Message): hotword_regex = proto.Field( proto.MESSAGE, number=1, message="CustomInfoType.Regex", ) - proximity = proto.Field( proto.MESSAGE, number=2, message="CustomInfoType.DetectionRule.Proximity", ) - likelihood_adjustment = proto.Field( proto.MESSAGE, number=3, @@ -357,37 +346,29 @@ class HotwordRule(proto.Message): ) info_type = proto.Field(proto.MESSAGE, number=1, message="InfoType",) - likelihood = proto.Field(proto.ENUM, number=6, enum="Likelihood",) - dictionary = proto.Field(proto.MESSAGE, number=2, oneof="type", message=Dictionary,) - regex = proto.Field(proto.MESSAGE, number=3, oneof="type", message=Regex,) - surrogate_type = proto.Field( proto.MESSAGE, number=4, oneof="type", message=SurrogateType, ) - stored_type = proto.Field( proto.MESSAGE, number=5, oneof="type", message="StoredType", ) - detection_rules = proto.RepeatedField( proto.MESSAGE, number=7, message=DetectionRule, ) - exclusion_type = proto.Field(proto.ENUM, number=8, enum=ExclusionType,) class FieldId(proto.Message): r"""General identifier of 
a data field in a storage service. - Attributes: name (str): Name describing the field. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class PartitionId(proto.Message): @@ -407,25 +388,22 @@ class PartitionId(proto.Message): which the entities belong. """ - project_id = proto.Field(proto.STRING, number=2) - - namespace_id = proto.Field(proto.STRING, number=4) + project_id = proto.Field(proto.STRING, number=2,) + namespace_id = proto.Field(proto.STRING, number=4,) class KindExpression(proto.Message): r"""A representation of a Datastore kind. - Attributes: name (str): The name of the kind. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DatastoreOptions(proto.Message): r"""Options defining a data set within Google Cloud Datastore. - Attributes: partition_id (google.cloud.dlp_v2.types.PartitionId): A partition ID identifies a grouping of @@ -437,7 +415,6 @@ class DatastoreOptions(proto.Message): """ partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) - kind = proto.Field(proto.MESSAGE, number=2, message="KindExpression",) @@ -503,11 +480,9 @@ class CloudStorageRegexFileSet(proto.Message): GitHub. """ - bucket_name = proto.Field(proto.STRING, number=1) - - include_regex = proto.RepeatedField(proto.STRING, number=2) - - exclude_regex = proto.RepeatedField(proto.STRING, number=3) + bucket_name = proto.Field(proto.STRING, number=1,) + include_regex = proto.RepeatedField(proto.STRING, number=2,) + exclude_regex = proto.RepeatedField(proto.STRING, number=3,) class CloudStorageOptions(proto.Message): @@ -558,7 +533,6 @@ class SampleMethod(proto.Enum): class FileSet(proto.Message): r"""Set of files to scan. - Attributes: url (str): The Cloud Storage url of the file(s) to scan, in the format @@ -578,52 +552,43 @@ class FileSet(proto.Message): ``url`` or ``regex_file_set`` must be set. """ - url = proto.Field(proto.STRING, number=1) - + url = proto.Field(proto.STRING, number=1,) regex_file_set = proto.Field( proto.MESSAGE, number=2, message="CloudStorageRegexFileSet", ) file_set = proto.Field(proto.MESSAGE, number=1, message=FileSet,) - - bytes_limit_per_file = proto.Field(proto.INT64, number=4) - - bytes_limit_per_file_percent = proto.Field(proto.INT32, number=8) - + bytes_limit_per_file = proto.Field(proto.INT64, number=4,) + bytes_limit_per_file_percent = proto.Field(proto.INT32, number=8,) file_types = proto.RepeatedField(proto.ENUM, number=5, enum="FileType",) - sample_method = proto.Field(proto.ENUM, number=6, enum=SampleMethod,) - - files_limit_percent = proto.Field(proto.INT32, number=7) + files_limit_percent = proto.Field(proto.INT32, number=7,) class CloudStorageFileSet(proto.Message): r"""Message representing a set of files in Cloud Storage. - Attributes: url (str): The url, in the format ``gs:///``. Trailing wildcard in the path is allowed. """ - url = proto.Field(proto.STRING, number=1) + url = proto.Field(proto.STRING, number=1,) class CloudStoragePath(proto.Message): r"""Message representing a single file or path in Cloud Storage. - Attributes: path (str): A url representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt """ - path = proto.Field(proto.STRING, number=1) + path = proto.Field(proto.STRING, number=1,) class BigQueryOptions(proto.Message): r"""Options defining BigQuery table and row identifiers. - Attributes: table_reference (google.cloud.dlp_v2.types.BigQueryTable): Complete BigQuery table reference. 
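# A compact sketch of the CustomInfoType pieces defined above: a regex-based
# custom detector wired into an InspectConfig. The type name and pattern are
# placeholder assumptions.
from google.cloud import dlp_v2

custom_info_type = dlp_v2.CustomInfoType(
    info_type=dlp_v2.InfoType(name="CUSTOM_ACCOUNT_ID"),  # hypothetical name
    regex=dlp_v2.CustomInfoType.Regex(pattern=r"ACCT-\d{8}"),
    likelihood=dlp_v2.Likelihood.POSSIBLE,
)
inspect_config = dlp_v2.InspectConfig(custom_info_types=[custom_info_type])
# A hotword rule (via an InspectionRuleSet) could additionally raise the
# likelihood when a trigger word appears near the match.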
@@ -665,23 +630,17 @@ class SampleMethod(proto.Enum): RANDOM_START = 2 table_reference = proto.Field(proto.MESSAGE, number=1, message="BigQueryTable",) - identifying_fields = proto.RepeatedField( proto.MESSAGE, number=2, message="FieldId", ) - - rows_limit = proto.Field(proto.INT64, number=3) - - rows_limit_percent = proto.Field(proto.INT32, number=6) - + rows_limit = proto.Field(proto.INT64, number=3,) + rows_limit_percent = proto.Field(proto.INT32, number=6,) sample_method = proto.Field(proto.ENUM, number=4, enum=SampleMethod,) - excluded_fields = proto.RepeatedField(proto.MESSAGE, number=5, message="FieldId",) class StorageConfig(proto.Message): r"""Shared message indicating Cloud storage type. - Attributes: datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): Google Cloud Datastore options. @@ -742,30 +701,27 @@ class TimespanConfig(proto.Message): of the execution of the last run of the JobTrigger. """ - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - + start_time = proto.Field( + proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, + ) timestamp_field = proto.Field(proto.MESSAGE, number=3, message="FieldId",) - - enable_auto_population_of_timespan_config = proto.Field(proto.BOOL, number=4) + enable_auto_population_of_timespan_config = proto.Field(proto.BOOL, number=4,) datastore_options = proto.Field( proto.MESSAGE, number=2, oneof="type", message="DatastoreOptions", ) - cloud_storage_options = proto.Field( proto.MESSAGE, number=3, oneof="type", message="CloudStorageOptions", ) - big_query_options = proto.Field( proto.MESSAGE, number=4, oneof="type", message="BigQueryOptions", ) - hybrid_options = proto.Field( proto.MESSAGE, number=9, oneof="type", message="HybridOptions", ) - timespan_config = proto.Field(proto.MESSAGE, number=6, message=TimespanConfig,) @@ -813,18 +769,14 @@ class HybridOptions(proto.Message): the columns that are primary keys. """ - description = proto.Field(proto.STRING, number=1) - - required_finding_label_keys = proto.RepeatedField(proto.STRING, number=2) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) - + description = proto.Field(proto.STRING, number=1,) + required_finding_label_keys = proto.RepeatedField(proto.STRING, number=2,) + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) table_options = proto.Field(proto.MESSAGE, number=4, message="TableOptions",) class BigQueryKey(proto.Message): r"""Row key for identifying a record in BigQuery table. - Attributes: table_reference (google.cloud.dlp_v2.types.BigQueryTable): Complete BigQuery table reference. @@ -838,13 +790,11 @@ class BigQueryKey(proto.Message): """ table_reference = proto.Field(proto.MESSAGE, number=1, message="BigQueryTable",) - - row_number = proto.Field(proto.INT64, number=2) + row_number = proto.Field(proto.INT64, number=2,) class DatastoreKey(proto.Message): r"""Record key for a finding in Cloud Datastore. - Attributes: entity_key (google.cloud.dlp_v2.types.Key): Datastore entity key. @@ -901,14 +851,11 @@ class PathElement(proto.Message): when UTF-8 encoded. Cannot be ``""``. 
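# A brief sketch combining the storage messages above: scan 10% of a BigQuery
# table, restricted to rows newer than a given timestamp via TimespanConfig.
# The table reference and timestamp are placeholder assumptions.
from google.cloud import dlp_v2
from google.protobuf import timestamp_pb2

storage_config = dlp_v2.StorageConfig(
    big_query_options=dlp_v2.BigQueryOptions(
        table_reference={
            "project_id": "my-project",
            "dataset_id": "my_dataset",
            "table_id": "events",
        },
        rows_limit_percent=10,
    ),
    timespan_config=dlp_v2.StorageConfig.TimespanConfig(
        start_time=timestamp_pb2.Timestamp(seconds=1609459200),  # 2021-01-01T00:00:00Z
    ),
)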
""" - kind = proto.Field(proto.STRING, number=1) - - id = proto.Field(proto.INT64, number=2, oneof="id_type") - - name = proto.Field(proto.STRING, number=3, oneof="id_type") + kind = proto.Field(proto.STRING, number=1,) + id = proto.Field(proto.INT64, number=2, oneof="id_type",) + name = proto.Field(proto.STRING, number=3, oneof="id_type",) partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) - path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,) @@ -930,12 +877,10 @@ class RecordKey(proto.Message): datastore_key = proto.Field( proto.MESSAGE, number=2, oneof="type", message="DatastoreKey", ) - big_query_key = proto.Field( proto.MESSAGE, number=3, oneof="type", message="BigQueryKey", ) - - id_values = proto.RepeatedField(proto.STRING, number=5) + id_values = proto.RepeatedField(proto.STRING, number=5,) class BigQueryTable(proto.Message): @@ -956,16 +901,13 @@ class BigQueryTable(proto.Message): Name of the table. """ - project_id = proto.Field(proto.STRING, number=1) - - dataset_id = proto.Field(proto.STRING, number=2) - - table_id = proto.Field(proto.STRING, number=3) + project_id = proto.Field(proto.STRING, number=1,) + dataset_id = proto.Field(proto.STRING, number=2,) + table_id = proto.Field(proto.STRING, number=3,) class BigQueryField(proto.Message): r"""Message defining a field of a BigQuery table. - Attributes: table (google.cloud.dlp_v2.types.BigQueryTable): Source table of the field. @@ -974,7 +916,6 @@ class BigQueryField(proto.Message): """ table = proto.Field(proto.MESSAGE, number=1, message="BigQueryTable",) - field = proto.Field(proto.MESSAGE, number=2, message="FieldId",) @@ -997,7 +938,6 @@ class EntityId(proto.Message): class TableOptions(proto.Message): r"""Instructions regarding the table content being inspected. - Attributes: identifying_fields (Sequence[google.cloud.dlp_v2.types.FieldId]): The columns that are the primary keys for diff --git a/scripts/fixup_dlp_v2_keywords.py b/scripts/fixup_dlp_v2_keywords.py index f1419971..82a5fabe 100644 --- a/scripts/fixup_dlp_v2_keywords.py +++ b/scripts/fixup_dlp_v2_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os import libcst as cst @@ -41,41 +39,40 @@ def partition( class dlpCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'activate_job_trigger': ('name', ), - 'cancel_dlp_job': ('name', ), - 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), - 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), - 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), - 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), - 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), - 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), - 'delete_deidentify_template': ('name', ), - 'delete_dlp_job': ('name', ), - 'delete_inspect_template': ('name', ), - 'delete_job_trigger': ('name', ), - 'delete_stored_info_type': ('name', ), - 'finish_dlp_job': ('name', ), - 'get_deidentify_template': ('name', ), - 'get_dlp_job': ('name', ), - 'get_inspect_template': ('name', ), - 'get_job_trigger': ('name', ), - 'get_stored_info_type': ('name', ), - 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), - 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), - 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), - 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), - 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), - 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'location_id', ), - 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), - 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), - 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), - 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), - 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), - 'update_stored_info_type': ('name', 'config', 'update_mask', ), - + 'activate_job_trigger': ('name', ), + 'cancel_dlp_job': ('name', ), + 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), + 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), + 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), + 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), + 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), + 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), + 'delete_deidentify_template': ('name', ), + 'delete_dlp_job': ('name', ), + 'delete_inspect_template': ('name', ), + 'delete_job_trigger': ('name', ), + 'delete_stored_info_type': ('name', ), + 
'finish_dlp_job': ('name', ), + 'get_deidentify_template': ('name', ), + 'get_dlp_job': ('name', ), + 'get_inspect_template': ('name', ), + 'get_job_trigger': ('name', ), + 'get_stored_info_type': ('name', ), + 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), + 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), + 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), + 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), + 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), + 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'location_id', ), + 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), + 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), + 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), + 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), + 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), + 'update_stored_info_type': ('name', 'config', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -106,7 +103,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
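# Roughly, the dlpCallTransformer above rewrites call sites written for the
# older positional surface into the microgenerator's single request-dict form,
# keeping the retry/timeout/metadata control parameters as real keyword
# arguments. For example (hypothetical call site):
#
#     client.inspect_content(parent, inspect_config=config, item=item)
#
# becomes:
#
#     client.inspect_content(
#         request={"parent": parent, "inspect_config": config, "item": item},
#     )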
+# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/dlp_v2/__init__.py b/tests/unit/gapic/dlp_v2/__init__.py index 42ffdf2b..4de65971 100644 --- a/tests/unit/gapic/dlp_v2/__init__.py +++ b/tests/unit/gapic/dlp_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/dlp_v2/test_dlp_service.py b/tests/unit/gapic/dlp_v2/test_dlp_service.py index 35a74f2f..b1df7e4f 100644 --- a/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ b/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,29 +23,57 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient from google.cloud.dlp_v2.services.dlp_service import pagers from google.cloud.dlp_v2.services.dlp_service import transports +from google.cloud.dlp_v2.services.dlp_service.transports.base import _API_CORE_VERSION +from google.cloud.dlp_v2.services.dlp_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dlp_v2.types import dlp from google.cloud.dlp_v2.types import storage from google.oauth2 import service_account -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore -from google.type import date_pb2 as date # type: ignore -from google.type import dayofweek_pb2 as dayofweek # type: ignore -from google.type import timeofday_pb2 as timeofday # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore 
+from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -92,7 +119,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class", [DlpServiceClient, DlpServiceAsyncClient,]) def test_dlp_service_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -107,7 +134,7 @@ def test_dlp_service_client_from_service_account_info(client_class): @pytest.mark.parametrize("client_class", [DlpServiceClient, DlpServiceAsyncClient,]) def test_dlp_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -158,7 +185,7 @@ def test_dlp_service_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(DlpServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -442,7 +469,7 @@ def test_inspect_content( transport: str = "grpc", request_type=dlp.InspectContentRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -453,17 +480,14 @@ def test_inspect_content( with mock.patch.object(type(client.transport.inspect_content), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectContentResponse() - response = client.inspect_content(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.InspectContentResponse) @@ -475,7 +499,7 @@ def test_inspect_content_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -483,7 +507,6 @@ def test_inspect_content_empty_call(): client.inspect_content() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() @@ -492,7 +515,7 @@ async def test_inspect_content_async( transport: str = "grpc_asyncio", request_type=dlp.InspectContentRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -505,13 +528,11 @@ async def test_inspect_content_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.InspectContentResponse() ) - response = await client.inspect_content(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() # Establish that the response is the type that we expect. @@ -524,17 +545,17 @@ async def test_inspect_content_async_from_dict(): def test_inspect_content_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.InspectContentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.inspect_content), "__call__") as call: call.return_value = dlp.InspectContentResponse() - client.inspect_content(request) # Establish that the underlying gRPC stub method was called. @@ -549,11 +570,12 @@ def test_inspect_content_field_headers(): @pytest.mark.asyncio async def test_inspect_content_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.InspectContentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -561,7 +583,6 @@ async def test_inspect_content_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.InspectContentResponse() ) - await client.inspect_content(request) # Establish that the underlying gRPC stub method was called. 
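# --- Illustrative sketch (editor's addition, not part of the generated patch) ---
# The skip markers introduced near the top of this file gate tests on the installed
# google-auth / google-api-core versions. Below is a minimal, hedged example of how
# such a marker is applied; the test name is hypothetical, and _GOOGLE_AUTH_VERSION
# is derived directly from google.auth here rather than imported from transports.base.
import google.auth
import packaging.version
import pytest

_GOOGLE_AUTH_VERSION = google.auth.__version__

requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)


@requires_google_auth_gte_1_25_0
def test_runs_only_on_new_google_auth():
    # Skipped automatically when an older google-auth is installed.
    assert packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse(
        "1.25.0"
    )
# --- end sketch ---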
@@ -576,7 +597,7 @@ async def test_inspect_content_field_headers_async(): def test_redact_image(transport: str = "grpc", request_type=dlp.RedactImageRequest): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -590,21 +611,16 @@ def test_redact_image(transport: str = "grpc", request_type=dlp.RedactImageReque redacted_image=b"redacted_image_blob", extracted_text="extracted_text_value", ) - response = client.redact_image(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b"redacted_image_blob" - assert response.extracted_text == "extracted_text_value" @@ -616,7 +632,7 @@ def test_redact_image_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -624,7 +640,6 @@ def test_redact_image_empty_call(): client.redact_image() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() @@ -633,7 +648,7 @@ async def test_redact_image_async( transport: str = "grpc_asyncio", request_type=dlp.RedactImageRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -649,20 +664,16 @@ async def test_redact_image_async( extracted_text="extracted_text_value", ) ) - response = await client.redact_image(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b"redacted_image_blob" - assert response.extracted_text == "extracted_text_value" @@ -672,17 +683,17 @@ async def test_redact_image_async_from_dict(): def test_redact_image_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.RedactImageRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.redact_image), "__call__") as call: call.return_value = dlp.RedactImageResponse() - client.redact_image(request) # Establish that the underlying gRPC stub method was called. 
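# --- Illustrative sketch (editor's addition, not part of the generated patch) ---
# Hedged summary of the unary-call mocking pattern repeated throughout these hunks:
# patch the transport's bound stub, stage a response, and inspect the request that
# the client actually sent. The helper name below is hypothetical; the client,
# transport attribute, and message types come from the generated code above.
import mock

from google.auth import credentials as ga_credentials
from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
from google.cloud.dlp_v2.types import dlp


def sketch_unary_call_mocking():
    client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
    # Patching type(...).__call__ intercepts the RPC before it reaches the network.
    with mock.patch.object(type(client.transport.redact_image), "__call__") as call:
        call.return_value = dlp.RedactImageResponse(
            redacted_image=b"redacted_image_blob",
            extracted_text="extracted_text_value",
        )
        response = client.redact_image(request=dlp.RedactImageRequest())
    # The request object is the first positional argument of the stub call.
    _, args, _ = call.mock_calls[0]
    assert args[0] == dlp.RedactImageRequest()
    assert response.extracted_text == "extracted_text_value"
# --- end sketch ---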
@@ -697,11 +708,12 @@ def test_redact_image_field_headers(): @pytest.mark.asyncio async def test_redact_image_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.RedactImageRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -709,7 +721,6 @@ async def test_redact_image_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.RedactImageResponse() ) - await client.redact_image(request) # Establish that the underlying gRPC stub method was called. @@ -726,7 +737,7 @@ def test_deidentify_content( transport: str = "grpc", request_type=dlp.DeidentifyContentRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -739,17 +750,14 @@ def test_deidentify_content( ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyContentResponse() - response = client.deidentify_content(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) @@ -761,7 +769,7 @@ def test_deidentify_content_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -771,7 +779,6 @@ def test_deidentify_content_empty_call(): client.deidentify_content() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() @@ -780,7 +787,7 @@ async def test_deidentify_content_async( transport: str = "grpc_asyncio", request_type=dlp.DeidentifyContentRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -795,13 +802,11 @@ async def test_deidentify_content_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyContentResponse() ) - response = await client.deidentify_content(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() # Establish that the response is the type that we expect. @@ -814,11 +819,12 @@ async def test_deidentify_content_async_from_dict(): def test_deidentify_content_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = dlp.DeidentifyContentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -826,7 +832,6 @@ def test_deidentify_content_field_headers(): type(client.transport.deidentify_content), "__call__" ) as call: call.return_value = dlp.DeidentifyContentResponse() - client.deidentify_content(request) # Establish that the underlying gRPC stub method was called. @@ -841,11 +846,12 @@ def test_deidentify_content_field_headers(): @pytest.mark.asyncio async def test_deidentify_content_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeidentifyContentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -855,7 +861,6 @@ async def test_deidentify_content_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyContentResponse() ) - await client.deidentify_content(request) # Establish that the underlying gRPC stub method was called. @@ -872,7 +877,7 @@ def test_reidentify_content( transport: str = "grpc", request_type=dlp.ReidentifyContentRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -885,17 +890,14 @@ def test_reidentify_content( ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ReidentifyContentResponse() - response = client.reidentify_content(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) @@ -907,7 +909,7 @@ def test_reidentify_content_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -917,7 +919,6 @@ def test_reidentify_content_empty_call(): client.reidentify_content() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() @@ -926,7 +927,7 @@ async def test_reidentify_content_async( transport: str = "grpc_asyncio", request_type=dlp.ReidentifyContentRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -941,13 +942,11 @@ async def test_reidentify_content_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ReidentifyContentResponse() ) - response = await client.reidentify_content(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() # Establish that the response is the type that we expect. @@ -960,11 +959,12 @@ async def test_reidentify_content_async_from_dict(): def test_reidentify_content_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ReidentifyContentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -972,7 +972,6 @@ def test_reidentify_content_field_headers(): type(client.transport.reidentify_content), "__call__" ) as call: call.return_value = dlp.ReidentifyContentResponse() - client.reidentify_content(request) # Establish that the underlying gRPC stub method was called. @@ -987,11 +986,12 @@ def test_reidentify_content_field_headers(): @pytest.mark.asyncio async def test_reidentify_content_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ReidentifyContentRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1001,7 +1001,6 @@ async def test_reidentify_content_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ReidentifyContentResponse() ) - await client.reidentify_content(request) # Establish that the underlying gRPC stub method was called. @@ -1018,7 +1017,7 @@ def test_list_info_types( transport: str = "grpc", request_type=dlp.ListInfoTypesRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1029,17 +1028,14 @@ def test_list_info_types( with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListInfoTypesResponse() - response = client.list_info_types(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) @@ -1051,7 +1047,7 @@ def test_list_info_types_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1059,7 +1055,6 @@ def test_list_info_types_empty_call(): client.list_info_types() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() @@ -1068,7 +1063,7 @@ async def test_list_info_types_async( transport: str = "grpc_asyncio", request_type=dlp.ListInfoTypesRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1081,13 +1076,11 @@ async def test_list_info_types_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListInfoTypesResponse() ) - response = await client.list_info_types(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() # Establish that the response is the type that we expect. @@ -1100,13 +1093,12 @@ async def test_list_info_types_async_from_dict(): def test_list_info_types_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListInfoTypesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_info_types(parent="parent_value",) @@ -1115,12 +1107,11 @@ def test_list_info_types_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_info_types_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1132,7 +1123,7 @@ def test_list_info_types_flattened_error(): @pytest.mark.asyncio async def test_list_info_types_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: @@ -1150,13 +1141,12 @@ async def test_list_info_types_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_info_types_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
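# --- Illustrative sketch (editor's addition, not part of the generated patch) ---
# Hedged example of the flattened-argument convention the *_flattened tests cover:
# keyword arguments are copied into the request object, while mixing a request
# object with flattened fields raises ValueError. The wrapper name is hypothetical.
import mock

from google.auth import credentials as ga_credentials
from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
from google.cloud.dlp_v2.types import dlp


def sketch_flattened_arguments():
    client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.list_info_types), "__call__") as call:
        call.return_value = dlp.ListInfoTypesResponse()
        # Flattened form: the keyword lands on the generated request object.
        client.list_info_types(parent="parent_value")
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == "parent_value"
        # Request object plus flattened fields is rejected by the client.
        try:
            client.list_info_types(dlp.ListInfoTypesRequest(), parent="parent_value")
        except ValueError:
            pass
# --- end sketch ---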
@@ -1170,7 +1160,7 @@ def test_create_inspect_template( transport: str = "grpc", request_type=dlp.CreateInspectTemplateRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1187,23 +1177,17 @@ def test_create_inspect_template( display_name="display_name_value", description="description_value", ) - response = client.create_inspect_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1215,7 +1199,7 @@ def test_create_inspect_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1225,7 +1209,6 @@ def test_create_inspect_template_empty_call(): client.create_inspect_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() @@ -1234,7 +1217,7 @@ async def test_create_inspect_template_async( transport: str = "grpc_asyncio", request_type=dlp.CreateInspectTemplateRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1253,22 +1236,17 @@ async def test_create_inspect_template_async( description="description_value", ) ) - response = await client.create_inspect_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.InspectTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1278,11 +1256,12 @@ async def test_create_inspect_template_async_from_dict(): def test_create_inspect_template_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateInspectTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1290,7 +1269,6 @@ def test_create_inspect_template_field_headers(): type(client.transport.create_inspect_template), "__call__" ) as call: call.return_value = dlp.InspectTemplate() - client.create_inspect_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -1305,11 +1283,12 @@ def test_create_inspect_template_field_headers(): @pytest.mark.asyncio async def test_create_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateInspectTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1317,7 +1296,6 @@ async def test_create_inspect_template_field_headers_async(): type(client.transport.create_inspect_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.create_inspect_template(request) # Establish that the underlying gRPC stub method was called. @@ -1331,7 +1309,7 @@ async def test_create_inspect_template_field_headers_async(): def test_create_inspect_template_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1339,7 +1317,6 @@ def test_create_inspect_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_inspect_template( @@ -1351,14 +1328,12 @@ def test_create_inspect_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].inspect_template == dlp.InspectTemplate(name="name_value") def test_create_inspect_template_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1372,7 +1347,7 @@ def test_create_inspect_template_flattened_error(): @pytest.mark.asyncio async def test_create_inspect_template_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1393,15 +1368,13 @@ async def test_create_inspect_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].inspect_template == dlp.InspectTemplate(name="name_value") @pytest.mark.asyncio async def test_create_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1417,7 +1390,7 @@ def test_update_inspect_template( transport: str = "grpc", request_type=dlp.UpdateInspectTemplateRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1434,23 +1407,17 @@ def test_update_inspect_template( display_name="display_name_value", description="description_value", ) - response = client.update_inspect_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1462,7 +1429,7 @@ def test_update_inspect_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1472,7 +1439,6 @@ def test_update_inspect_template_empty_call(): client.update_inspect_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() @@ -1481,7 +1447,7 @@ async def test_update_inspect_template_async( transport: str = "grpc_asyncio", request_type=dlp.UpdateInspectTemplateRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1500,22 +1466,17 @@ async def test_update_inspect_template_async( description="description_value", ) ) - response = await client.update_inspect_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.InspectTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1525,11 +1486,12 @@ async def test_update_inspect_template_async_from_dict(): def test_update_inspect_template_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.UpdateInspectTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1537,7 +1499,6 @@ def test_update_inspect_template_field_headers(): type(client.transport.update_inspect_template), "__call__" ) as call: call.return_value = dlp.InspectTemplate() - client.update_inspect_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -1552,11 +1513,12 @@ def test_update_inspect_template_field_headers(): @pytest.mark.asyncio async def test_update_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.UpdateInspectTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1564,7 +1526,6 @@ async def test_update_inspect_template_field_headers_async(): type(client.transport.update_inspect_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.update_inspect_template(request) # Establish that the underlying gRPC stub method was called. @@ -1578,7 +1539,7 @@ async def test_update_inspect_template_field_headers_async(): def test_update_inspect_template_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1586,29 +1547,25 @@ def test_update_inspect_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_inspect_template( name="name_value", inspect_template=dlp.InspectTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].inspect_template == dlp.InspectTemplate(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_inspect_template_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1617,13 +1574,13 @@ def test_update_inspect_template_flattened_error(): dlp.UpdateInspectTemplateRequest(), name="name_value", inspect_template=dlp.InspectTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_inspect_template_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1638,24 +1595,21 @@ async def test_update_inspect_template_flattened_async(): response = await client.update_inspect_template( name="name_value", inspect_template=dlp.InspectTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].inspect_template == dlp.InspectTemplate(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1664,7 +1618,7 @@ async def test_update_inspect_template_flattened_error_async(): dlp.UpdateInspectTemplateRequest(), name="name_value", inspect_template=dlp.InspectTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1672,7 +1626,7 @@ def test_get_inspect_template( transport: str = "grpc", request_type=dlp.GetInspectTemplateRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1689,23 +1643,17 @@ def test_get_inspect_template( display_name="display_name_value", description="description_value", ) - response = client.get_inspect_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1717,7 +1665,7 @@ def test_get_inspect_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
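# --- Illustrative sketch (editor's addition, not part of the generated patch) ---
# Hedged example of the field_mask_pb2 usage these hunks migrate to: build a
# google.protobuf FieldMask and pass it as the flattened update_mask argument.
# The helper name is hypothetical; the values mirror the generated tests above.
import mock

from google.auth import credentials as ga_credentials
from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
from google.cloud.dlp_v2.types import dlp
from google.protobuf import field_mask_pb2


def sketch_update_mask():
    client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(
        type(client.transport.update_inspect_template), "__call__"
    ) as call:
        call.return_value = dlp.InspectTemplate()
        client.update_inspect_template(
            name="name_value",
            inspect_template=dlp.InspectTemplate(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )
    _, args, _ = call.mock_calls[0]
    assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
# --- end sketch ---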
@@ -1727,7 +1675,6 @@ def test_get_inspect_template_empty_call(): client.get_inspect_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() @@ -1736,7 +1683,7 @@ async def test_get_inspect_template_async( transport: str = "grpc_asyncio", request_type=dlp.GetInspectTemplateRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1755,22 +1702,17 @@ async def test_get_inspect_template_async( description="description_value", ) ) - response = await client.get_inspect_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.InspectTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -1780,11 +1722,12 @@ async def test_get_inspect_template_async_from_dict(): def test_get_inspect_template_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetInspectTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1792,7 +1735,6 @@ def test_get_inspect_template_field_headers(): type(client.transport.get_inspect_template), "__call__" ) as call: call.return_value = dlp.InspectTemplate() - client.get_inspect_template(request) # Establish that the underlying gRPC stub method was called. @@ -1807,11 +1749,12 @@ def test_get_inspect_template_field_headers(): @pytest.mark.asyncio async def test_get_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetInspectTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1819,7 +1762,6 @@ async def test_get_inspect_template_field_headers_async(): type(client.transport.get_inspect_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.get_inspect_template(request) # Establish that the underlying gRPC stub method was called. @@ -1833,7 +1775,7 @@ async def test_get_inspect_template_field_headers_async(): def test_get_inspect_template_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1841,7 +1783,6 @@ def test_get_inspect_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_inspect_template(name="name_value",) @@ -1850,12 +1791,11 @@ def test_get_inspect_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_inspect_template_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1867,7 +1807,7 @@ def test_get_inspect_template_flattened_error(): @pytest.mark.asyncio async def test_get_inspect_template_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1885,13 +1825,12 @@ async def test_get_inspect_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1905,7 +1844,7 @@ def test_list_inspect_templates( transport: str = "grpc", request_type=dlp.ListInspectTemplatesRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1920,19 +1859,15 @@ def test_list_inspect_templates( call.return_value = dlp.ListInspectTemplatesResponse( next_page_token="next_page_token_value", ) - response = client.list_inspect_templates(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == "next_page_token_value" @@ -1944,7 +1879,7 @@ def test_list_inspect_templates_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1954,7 +1889,6 @@ def test_list_inspect_templates_empty_call(): client.list_inspect_templates() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() @@ -1963,7 +1897,7 @@ async def test_list_inspect_templates_async( transport: str = "grpc_asyncio", request_type=dlp.ListInspectTemplatesRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1978,18 +1912,15 @@ async def test_list_inspect_templates_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListInspectTemplatesResponse(next_page_token="next_page_token_value",) ) - response = await client.list_inspect_templates(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1999,11 +1930,12 @@ async def test_list_inspect_templates_async_from_dict(): def test_list_inspect_templates_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListInspectTemplatesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2011,7 +1943,6 @@ def test_list_inspect_templates_field_headers(): type(client.transport.list_inspect_templates), "__call__" ) as call: call.return_value = dlp.ListInspectTemplatesResponse() - client.list_inspect_templates(request) # Establish that the underlying gRPC stub method was called. @@ -2026,11 +1957,12 @@ def test_list_inspect_templates_field_headers(): @pytest.mark.asyncio async def test_list_inspect_templates_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListInspectTemplatesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2040,7 +1972,6 @@ async def test_list_inspect_templates_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListInspectTemplatesResponse() ) - await client.list_inspect_templates(request) # Establish that the underlying gRPC stub method was called. @@ -2054,7 +1985,7 @@ async def test_list_inspect_templates_field_headers_async(): def test_list_inspect_templates_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2062,7 +1993,6 @@ def test_list_inspect_templates_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = dlp.ListInspectTemplatesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_inspect_templates(parent="parent_value",) @@ -2071,12 +2001,11 @@ def test_list_inspect_templates_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_inspect_templates_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2088,7 +2017,7 @@ def test_list_inspect_templates_flattened_error(): @pytest.mark.asyncio async def test_list_inspect_templates_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2108,13 +2037,12 @@ async def test_list_inspect_templates_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_inspect_templates_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2125,7 +2053,7 @@ async def test_list_inspect_templates_flattened_error_async(): def test_list_inspect_templates_pager(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2167,7 +2095,7 @@ def test_list_inspect_templates_pager(): def test_list_inspect_templates_pages(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2201,7 +2129,7 @@ def test_list_inspect_templates_pages(): @pytest.mark.asyncio async def test_list_inspect_templates_async_pager(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2242,7 +2170,7 @@ async def test_list_inspect_templates_async_pager(): @pytest.mark.asyncio async def test_list_inspect_templates_async_pages(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2282,7 +2210,7 @@ def test_delete_inspect_template( transport: str = "grpc", request_type=dlp.DeleteInspectTemplateRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2295,13 +2223,11 @@ def test_delete_inspect_template( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_inspect_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() # Establish that the response is the type that we expect. @@ -2316,7 +2242,7 @@ def test_delete_inspect_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2326,7 +2252,6 @@ def test_delete_inspect_template_empty_call(): client.delete_inspect_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() @@ -2335,7 +2260,7 @@ async def test_delete_inspect_template_async( transport: str = "grpc_asyncio", request_type=dlp.DeleteInspectTemplateRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2348,13 +2273,11 @@ async def test_delete_inspect_template_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_inspect_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() # Establish that the response is the type that we expect. @@ -2367,11 +2290,12 @@ async def test_delete_inspect_template_async_from_dict(): def test_delete_inspect_template_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteInspectTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2379,7 +2303,6 @@ def test_delete_inspect_template_field_headers(): type(client.transport.delete_inspect_template), "__call__" ) as call: call.return_value = None - client.delete_inspect_template(request) # Establish that the underlying gRPC stub method was called. 
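# --- Illustrative sketch (editor's addition, not part of the generated patch) ---
# Hedged example of the pager behavior test_list_inspect_templates_pager exercises:
# stage several pages with side_effect and let the returned pager fetch them
# transparently while iterating. Page contents are illustrative only.
import mock

from google.auth import credentials as ga_credentials
from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
from google.cloud.dlp_v2.types import dlp


def sketch_pager_iteration():
    client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(
        type(client.transport.list_inspect_templates), "__call__"
    ) as call:
        # Two pages: only the first carries a next_page_token.
        call.side_effect = (
            dlp.ListInspectTemplatesResponse(
                inspect_templates=[dlp.InspectTemplate(), dlp.InspectTemplate()],
                next_page_token="abc",
            ),
            dlp.ListInspectTemplatesResponse(
                inspect_templates=[dlp.InspectTemplate()], next_page_token="",
            ),
        )
        results = list(client.list_inspect_templates(parent="parent_value"))
    assert len(results) == 3
    assert all(isinstance(r, dlp.InspectTemplate) for r in results)
# --- end sketch ---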
@@ -2394,11 +2317,12 @@ def test_delete_inspect_template_field_headers(): @pytest.mark.asyncio async def test_delete_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteInspectTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2406,7 +2330,6 @@ async def test_delete_inspect_template_field_headers_async(): type(client.transport.delete_inspect_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_inspect_template(request) # Establish that the underlying gRPC stub method was called. @@ -2420,7 +2343,7 @@ async def test_delete_inspect_template_field_headers_async(): def test_delete_inspect_template_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2428,7 +2351,6 @@ def test_delete_inspect_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_inspect_template(name="name_value",) @@ -2437,12 +2359,11 @@ def test_delete_inspect_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_inspect_template_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2454,7 +2375,7 @@ def test_delete_inspect_template_flattened_error(): @pytest.mark.asyncio async def test_delete_inspect_template_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2472,13 +2393,12 @@ async def test_delete_inspect_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2492,7 +2412,7 @@ def test_create_deidentify_template( transport: str = "grpc", request_type=dlp.CreateDeidentifyTemplateRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2509,23 +2429,17 @@ def test_create_deidentify_template( display_name="display_name_value", description="description_value", ) - response = client.create_deidentify_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2537,7 +2451,7 @@ def test_create_deidentify_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2547,7 +2461,6 @@ def test_create_deidentify_template_empty_call(): client.create_deidentify_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() @@ -2556,7 +2469,7 @@ async def test_create_deidentify_template_async( transport: str = "grpc_asyncio", request_type=dlp.CreateDeidentifyTemplateRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2575,22 +2488,17 @@ async def test_create_deidentify_template_async( description="description_value", ) ) - response = await client.create_deidentify_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2600,11 +2508,12 @@ async def test_create_deidentify_template_async_from_dict(): def test_create_deidentify_template_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateDeidentifyTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2612,7 +2521,6 @@ def test_create_deidentify_template_field_headers(): type(client.transport.create_deidentify_template), "__call__" ) as call: call.return_value = dlp.DeidentifyTemplate() - client.create_deidentify_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -2627,11 +2535,12 @@ def test_create_deidentify_template_field_headers(): @pytest.mark.asyncio async def test_create_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateDeidentifyTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2641,7 +2550,6 @@ async def test_create_deidentify_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyTemplate() ) - await client.create_deidentify_template(request) # Establish that the underlying gRPC stub method was called. @@ -2655,7 +2563,7 @@ async def test_create_deidentify_template_field_headers_async(): def test_create_deidentify_template_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2663,7 +2571,6 @@ def test_create_deidentify_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_deidentify_template( @@ -2675,14 +2582,12 @@ def test_create_deidentify_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].deidentify_template == dlp.DeidentifyTemplate(name="name_value") def test_create_deidentify_template_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2696,7 +2601,7 @@ def test_create_deidentify_template_flattened_error(): @pytest.mark.asyncio async def test_create_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2719,15 +2624,13 @@ async def test_create_deidentify_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].deidentify_template == dlp.DeidentifyTemplate(name="name_value") @pytest.mark.asyncio async def test_create_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2743,7 +2646,7 @@ def test_update_deidentify_template( transport: str = "grpc", request_type=dlp.UpdateDeidentifyTemplateRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2760,23 +2663,17 @@ def test_update_deidentify_template( display_name="display_name_value", description="description_value", ) - response = client.update_deidentify_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2788,7 +2685,7 @@ def test_update_deidentify_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2798,7 +2695,6 @@ def test_update_deidentify_template_empty_call(): client.update_deidentify_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() @@ -2807,7 +2703,7 @@ async def test_update_deidentify_template_async( transport: str = "grpc_asyncio", request_type=dlp.UpdateDeidentifyTemplateRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2826,22 +2722,17 @@ async def test_update_deidentify_template_async( description="description_value", ) ) - response = await client.update_deidentify_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -2851,11 +2742,12 @@ async def test_update_deidentify_template_async_from_dict(): def test_update_deidentify_template_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.UpdateDeidentifyTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2863,7 +2755,6 @@ def test_update_deidentify_template_field_headers(): type(client.transport.update_deidentify_template), "__call__" ) as call: call.return_value = dlp.DeidentifyTemplate() - client.update_deidentify_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -2878,11 +2769,12 @@ def test_update_deidentify_template_field_headers(): @pytest.mark.asyncio async def test_update_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.UpdateDeidentifyTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2892,7 +2784,6 @@ async def test_update_deidentify_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyTemplate() ) - await client.update_deidentify_template(request) # Establish that the underlying gRPC stub method was called. @@ -2906,7 +2797,7 @@ async def test_update_deidentify_template_field_headers_async(): def test_update_deidentify_template_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2914,29 +2805,25 @@ def test_update_deidentify_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_deidentify_template( name="name_value", deidentify_template=dlp.DeidentifyTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].deidentify_template == dlp.DeidentifyTemplate(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_deidentify_template_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2945,13 +2832,13 @@ def test_update_deidentify_template_flattened_error(): dlp.UpdateDeidentifyTemplateRequest(), name="name_value", deidentify_template=dlp.DeidentifyTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
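These `update_*_flattened` hunks replace `field_mask.FieldMask` with the protobuf-generated `field_mask_pb2.FieldMask`. The flattened keyword arguments end up on the request message that the tests then inspect via `args[0]`; a small sketch of the equivalent request object, built only from types shown in the hunks:

```python
from google.protobuf import field_mask_pb2

from google.cloud.dlp_v2.types import dlp

# Same fields the flattened call above populates and the assertions check.
request = dlp.UpdateDeidentifyTemplateRequest(
    name="name_value",
    deidentify_template=dlp.DeidentifyTemplate(name="name_value"),
    update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
assert request.update_mask.paths == ["paths_value"]
```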
with mock.patch.object( @@ -2968,24 +2855,21 @@ async def test_update_deidentify_template_flattened_async(): response = await client.update_deidentify_template( name="name_value", deidentify_template=dlp.DeidentifyTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].deidentify_template == dlp.DeidentifyTemplate(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2994,7 +2878,7 @@ async def test_update_deidentify_template_flattened_error_async(): dlp.UpdateDeidentifyTemplateRequest(), name="name_value", deidentify_template=dlp.DeidentifyTemplate(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -3002,7 +2886,7 @@ def test_get_deidentify_template( transport: str = "grpc", request_type=dlp.GetDeidentifyTemplateRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3019,23 +2903,17 @@ def test_get_deidentify_template( display_name="display_name_value", description="description_value", ) - response = client.get_deidentify_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -3047,7 +2925,7 @@ def test_get_deidentify_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3057,7 +2935,6 @@ def test_get_deidentify_template_empty_call(): client.get_deidentify_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() @@ -3066,7 +2943,7 @@ async def test_get_deidentify_template_async( transport: str = "grpc_asyncio", request_type=dlp.GetDeidentifyTemplateRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3085,22 +2962,17 @@ async def test_get_deidentify_template_async( description="description_value", ) ) - response = await client.get_deidentify_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" @@ -3110,11 +2982,12 @@ async def test_get_deidentify_template_async_from_dict(): def test_get_deidentify_template_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetDeidentifyTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3122,7 +2995,6 @@ def test_get_deidentify_template_field_headers(): type(client.transport.get_deidentify_template), "__call__" ) as call: call.return_value = dlp.DeidentifyTemplate() - client.get_deidentify_template(request) # Establish that the underlying gRPC stub method was called. @@ -3137,11 +3009,12 @@ def test_get_deidentify_template_field_headers(): @pytest.mark.asyncio async def test_get_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetDeidentifyTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3151,7 +3024,6 @@ async def test_get_deidentify_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyTemplate() ) - await client.get_deidentify_template(request) # Establish that the underlying gRPC stub method was called. @@ -3165,7 +3037,7 @@ async def test_get_deidentify_template_field_headers_async(): def test_get_deidentify_template_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3173,7 +3045,6 @@ def test_get_deidentify_template_flattened(): ) as call: # Designate an appropriate return value for the call. 
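The `*_field_headers` tests above set `request.name = "name/value"` and then check the metadata sent on the mocked call. The assertion itself falls outside the hunks shown; the sketch below assumes the usual GAPIC routing-header convention (`x-goog-request-params`), so treat the header name and assertion shape as assumptions rather than text from this patch:

```python
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
from google.cloud.dlp_v2.types import dlp

client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
request = dlp.GetDeidentifyTemplateRequest(name="name/value")

with mock.patch.object(
    type(client.transport.get_deidentify_template), "__call__"
) as call:
    call.return_value = dlp.DeidentifyTemplate()
    client.get_deidentify_template(request)
    # Assumed assertion shape: the resource name is echoed as a routing header.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value") in kw["metadata"]
```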
call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_deidentify_template(name="name_value",) @@ -3182,12 +3053,11 @@ def test_get_deidentify_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_deidentify_template_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3199,7 +3069,7 @@ def test_get_deidentify_template_flattened_error(): @pytest.mark.asyncio async def test_get_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3219,13 +3089,12 @@ async def test_get_deidentify_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3239,7 +3108,7 @@ def test_list_deidentify_templates( transport: str = "grpc", request_type=dlp.ListDeidentifyTemplatesRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3254,19 +3123,15 @@ def test_list_deidentify_templates( call.return_value = dlp.ListDeidentifyTemplatesResponse( next_page_token="next_page_token_value", ) - response = client.list_deidentify_templates(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == "next_page_token_value" @@ -3278,7 +3143,7 @@ def test_list_deidentify_templates_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3288,7 +3153,6 @@ def test_list_deidentify_templates_empty_call(): client.list_deidentify_templates() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() @@ -3297,7 +3161,7 @@ async def test_list_deidentify_templates_async( transport: str = "grpc_asyncio", request_type=dlp.ListDeidentifyTemplatesRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3314,18 +3178,15 @@ async def test_list_deidentify_templates_async( next_page_token="next_page_token_value", ) ) - response = await client.list_deidentify_templates(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -3335,11 +3196,12 @@ async def test_list_deidentify_templates_async_from_dict(): def test_list_deidentify_templates_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListDeidentifyTemplatesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3347,7 +3209,6 @@ def test_list_deidentify_templates_field_headers(): type(client.transport.list_deidentify_templates), "__call__" ) as call: call.return_value = dlp.ListDeidentifyTemplatesResponse() - client.list_deidentify_templates(request) # Establish that the underlying gRPC stub method was called. @@ -3362,11 +3223,12 @@ def test_list_deidentify_templates_field_headers(): @pytest.mark.asyncio async def test_list_deidentify_templates_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListDeidentifyTemplatesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3376,7 +3238,6 @@ async def test_list_deidentify_templates_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListDeidentifyTemplatesResponse() ) - await client.list_deidentify_templates(request) # Establish that the underlying gRPC stub method was called. @@ -3390,7 +3251,7 @@ async def test_list_deidentify_templates_field_headers_async(): def test_list_deidentify_templates_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3398,7 +3259,6 @@ def test_list_deidentify_templates_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = dlp.ListDeidentifyTemplatesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_deidentify_templates(parent="parent_value",) @@ -3407,12 +3267,11 @@ def test_list_deidentify_templates_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_deidentify_templates_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3424,7 +3283,7 @@ def test_list_deidentify_templates_flattened_error(): @pytest.mark.asyncio async def test_list_deidentify_templates_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3444,13 +3303,12 @@ async def test_list_deidentify_templates_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_deidentify_templates_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3461,7 +3319,7 @@ async def test_list_deidentify_templates_flattened_error_async(): def test_list_deidentify_templates_pager(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3506,7 +3364,7 @@ def test_list_deidentify_templates_pager(): def test_list_deidentify_templates_pages(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3543,7 +3401,7 @@ def test_list_deidentify_templates_pages(): @pytest.mark.asyncio async def test_list_deidentify_templates_async_pager(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3587,7 +3445,7 @@ async def test_list_deidentify_templates_async_pager(): @pytest.mark.asyncio async def test_list_deidentify_templates_async_pages(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
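The pager tests whose bodies are trimmed above feed several fake pages through the mocked transport and iterate the resulting pager. A self-contained sketch of that flow; the `deidentify_templates` response field and the two-page setup are illustrative assumptions, not lines taken from the hunks:

```python
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
from google.cloud.dlp_v2.types import dlp

client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())

with mock.patch.object(
    type(client.transport.list_deidentify_templates), "__call__"
) as call:
    # Two fake pages; the pager keeps calling while next_page_token is set.
    call.side_effect = (
        dlp.ListDeidentifyTemplatesResponse(
            deidentify_templates=[dlp.DeidentifyTemplate(), dlp.DeidentifyTemplate()],
            next_page_token="abc",
        ),
        dlp.ListDeidentifyTemplatesResponse(
            deidentify_templates=[dlp.DeidentifyTemplate()], next_page_token="",
        ),
    )
    results = list(client.list_deidentify_templates(parent="parent_value"))

assert len(results) == 3
```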
with mock.patch.object( @@ -3630,7 +3488,7 @@ def test_delete_deidentify_template( transport: str = "grpc", request_type=dlp.DeleteDeidentifyTemplateRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3643,13 +3501,11 @@ def test_delete_deidentify_template( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_deidentify_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() # Establish that the response is the type that we expect. @@ -3664,7 +3520,7 @@ def test_delete_deidentify_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3674,7 +3530,6 @@ def test_delete_deidentify_template_empty_call(): client.delete_deidentify_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() @@ -3683,7 +3538,7 @@ async def test_delete_deidentify_template_async( transport: str = "grpc_asyncio", request_type=dlp.DeleteDeidentifyTemplateRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3696,13 +3551,11 @@ async def test_delete_deidentify_template_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deidentify_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() # Establish that the response is the type that we expect. @@ -3715,11 +3568,12 @@ async def test_delete_deidentify_template_async_from_dict(): def test_delete_deidentify_template_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteDeidentifyTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3727,7 +3581,6 @@ def test_delete_deidentify_template_field_headers(): type(client.transport.delete_deidentify_template), "__call__" ) as call: call.return_value = None - client.delete_deidentify_template(request) # Establish that the underlying gRPC stub method was called. 
@@ -3742,11 +3595,12 @@ def test_delete_deidentify_template_field_headers(): @pytest.mark.asyncio async def test_delete_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteDeidentifyTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3754,7 +3608,6 @@ async def test_delete_deidentify_template_field_headers_async(): type(client.transport.delete_deidentify_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deidentify_template(request) # Establish that the underlying gRPC stub method was called. @@ -3768,7 +3621,7 @@ async def test_delete_deidentify_template_field_headers_async(): def test_delete_deidentify_template_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3776,7 +3629,6 @@ def test_delete_deidentify_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_deidentify_template(name="name_value",) @@ -3785,12 +3637,11 @@ def test_delete_deidentify_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_deidentify_template_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3802,7 +3653,7 @@ def test_delete_deidentify_template_flattened_error(): @pytest.mark.asyncio async def test_delete_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3820,13 +3671,12 @@ async def test_delete_deidentify_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
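For reference, the `*_empty_call` coverage failsafe kept by the delete hunks above, restated as a standalone runnable sketch assembled from lines that appear in those hunks:

```python
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
from google.cloud.dlp_v2.types import dlp

client = DlpServiceClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)

with mock.patch.object(
    type(client.transport.delete_deidentify_template), "__call__"
) as call:
    call.return_value = None
    # No request and no flattened fields: the default request must still be sent.
    client.delete_deidentify_template()
    call.assert_called()
    _, args, _ = call.mock_calls[0]
    assert args[0] == dlp.DeleteDeidentifyTemplateRequest()
```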
@@ -3840,7 +3690,7 @@ def test_create_job_trigger( transport: str = "grpc", request_type=dlp.CreateJobTriggerRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3865,25 +3715,18 @@ def test_create_job_trigger( ) ), ) - response = client.create_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.status == dlp.JobTrigger.Status.HEALTHY @@ -3895,7 +3738,7 @@ def test_create_job_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3905,7 +3748,6 @@ def test_create_job_trigger_empty_call(): client.create_job_trigger() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() @@ -3914,7 +3756,7 @@ async def test_create_job_trigger_async( transport: str = "grpc_asyncio", request_type=dlp.CreateJobTriggerRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3934,24 +3776,18 @@ async def test_create_job_trigger_async( status=dlp.JobTrigger.Status.HEALTHY, ) ) - response = await client.create_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.JobTrigger) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.status == dlp.JobTrigger.Status.HEALTHY @@ -3961,11 +3797,12 @@ async def test_create_job_trigger_async_from_dict(): def test_create_job_trigger_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateJobTriggerRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3973,7 +3810,6 @@ def test_create_job_trigger_field_headers(): type(client.transport.create_job_trigger), "__call__" ) as call: call.return_value = dlp.JobTrigger() - client.create_job_trigger(request) # Establish that the underlying gRPC stub method was called. 
@@ -3988,11 +3824,12 @@ def test_create_job_trigger_field_headers(): @pytest.mark.asyncio async def test_create_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateJobTriggerRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4000,7 +3837,6 @@ async def test_create_job_trigger_field_headers_async(): type(client.transport.create_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.create_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -4014,7 +3850,7 @@ async def test_create_job_trigger_field_headers_async(): def test_create_job_trigger_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4022,7 +3858,6 @@ def test_create_job_trigger_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_job_trigger( @@ -4033,14 +3868,12 @@ def test_create_job_trigger_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job_trigger == dlp.JobTrigger(name="name_value") def test_create_job_trigger_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4054,7 +3887,7 @@ def test_create_job_trigger_flattened_error(): @pytest.mark.asyncio async def test_create_job_trigger_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4074,15 +3907,13 @@ async def test_create_job_trigger_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job_trigger == dlp.JobTrigger(name="name_value") @pytest.mark.asyncio async def test_create_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
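The async variants above wrap their fake responses in `grpc_helpers_async.FakeUnaryUnaryCall` so the awaited RPC resolves to the wrapped message. A sketch of that pattern end to end; the `from google.api_core import grpc_helpers_async` import path is assumed from elsewhere in the module and is not part of these hunks:

```python
import asyncio
from unittest import mock

from google.api_core import grpc_helpers_async  # assumed import path
from google.auth import credentials as ga_credentials
from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient
from google.cloud.dlp_v2.types import dlp


async def main():
    client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(
        type(client.transport.create_job_trigger), "__call__"
    ) as call:
        # The awaited call resolves to the JobTrigger wrapped below.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            dlp.JobTrigger(name="name_value", status=dlp.JobTrigger.Status.HEALTHY)
        )
        response = await client.create_job_trigger(
            parent="parent_value", job_trigger=dlp.JobTrigger(name="name_value"),
        )
    assert response.status == dlp.JobTrigger.Status.HEALTHY


asyncio.run(main())
```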
@@ -4098,7 +3929,7 @@ def test_update_job_trigger( transport: str = "grpc", request_type=dlp.UpdateJobTriggerRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4123,25 +3954,18 @@ def test_update_job_trigger( ) ), ) - response = client.update_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.status == dlp.JobTrigger.Status.HEALTHY @@ -4153,7 +3977,7 @@ def test_update_job_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4163,7 +3987,6 @@ def test_update_job_trigger_empty_call(): client.update_job_trigger() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() @@ -4172,7 +3995,7 @@ async def test_update_job_trigger_async( transport: str = "grpc_asyncio", request_type=dlp.UpdateJobTriggerRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4192,24 +4015,18 @@ async def test_update_job_trigger_async( status=dlp.JobTrigger.Status.HEALTHY, ) ) - response = await client.update_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.JobTrigger) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.status == dlp.JobTrigger.Status.HEALTHY @@ -4219,11 +4036,12 @@ async def test_update_job_trigger_async_from_dict(): def test_update_job_trigger_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.UpdateJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4231,7 +4049,6 @@ def test_update_job_trigger_field_headers(): type(client.transport.update_job_trigger), "__call__" ) as call: call.return_value = dlp.JobTrigger() - client.update_job_trigger(request) # Establish that the underlying gRPC stub method was called. 
@@ -4246,11 +4063,12 @@ def test_update_job_trigger_field_headers(): @pytest.mark.asyncio async def test_update_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.UpdateJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4258,7 +4076,6 @@ async def test_update_job_trigger_field_headers_async(): type(client.transport.update_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.update_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -4272,7 +4089,7 @@ async def test_update_job_trigger_field_headers_async(): def test_update_job_trigger_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4280,29 +4097,25 @@ def test_update_job_trigger_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_job_trigger( name="name_value", job_trigger=dlp.JobTrigger(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].job_trigger == dlp.JobTrigger(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_job_trigger_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4311,13 +4124,13 @@ def test_update_job_trigger_flattened_error(): dlp.UpdateJobTriggerRequest(), name="name_value", job_trigger=dlp.JobTrigger(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_job_trigger_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4332,24 +4145,21 @@ async def test_update_job_trigger_flattened_async(): response = await client.update_job_trigger( name="name_value", job_trigger=dlp.JobTrigger(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].job_trigger == dlp.JobTrigger(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4358,7 +4168,7 @@ async def test_update_job_trigger_flattened_error_async(): dlp.UpdateJobTriggerRequest(), name="name_value", job_trigger=dlp.JobTrigger(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4366,7 +4176,7 @@ def test_hybrid_inspect_job_trigger( transport: str = "grpc", request_type=dlp.HybridInspectJobTriggerRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4379,17 +4189,14 @@ def test_hybrid_inspect_job_trigger( ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() - response = client.hybrid_inspect_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) @@ -4401,7 +4208,7 @@ def test_hybrid_inspect_job_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4411,7 +4218,6 @@ def test_hybrid_inspect_job_trigger_empty_call(): client.hybrid_inspect_job_trigger() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() @@ -4420,7 +4226,7 @@ async def test_hybrid_inspect_job_trigger_async( transport: str = "grpc_asyncio", request_type=dlp.HybridInspectJobTriggerRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4435,13 +4241,11 @@ async def test_hybrid_inspect_job_trigger_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.HybridInspectResponse() ) - response = await client.hybrid_inspect_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() # Establish that the response is the type that we expect. 
@@ -4454,11 +4258,12 @@ async def test_hybrid_inspect_job_trigger_async_from_dict(): def test_hybrid_inspect_job_trigger_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.HybridInspectJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4466,7 +4271,6 @@ def test_hybrid_inspect_job_trigger_field_headers(): type(client.transport.hybrid_inspect_job_trigger), "__call__" ) as call: call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -4481,11 +4285,12 @@ def test_hybrid_inspect_job_trigger_field_headers(): @pytest.mark.asyncio async def test_hybrid_inspect_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.HybridInspectJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4495,7 +4300,6 @@ async def test_hybrid_inspect_job_trigger_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.HybridInspectResponse() ) - await client.hybrid_inspect_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -4509,7 +4313,7 @@ async def test_hybrid_inspect_job_trigger_field_headers_async(): def test_hybrid_inspect_job_trigger_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4517,7 +4321,6 @@ def test_hybrid_inspect_job_trigger_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.hybrid_inspect_job_trigger(name="name_value",) @@ -4526,12 +4329,11 @@ def test_hybrid_inspect_job_trigger_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_hybrid_inspect_job_trigger_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4543,7 +4345,7 @@ def test_hybrid_inspect_job_trigger_flattened_error(): @pytest.mark.asyncio async def test_hybrid_inspect_job_trigger_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4563,13 +4365,12 @@ async def test_hybrid_inspect_job_trigger_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_hybrid_inspect_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4583,7 +4384,7 @@ def test_get_job_trigger( transport: str = "grpc", request_type=dlp.GetJobTriggerRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4606,25 +4407,18 @@ def test_get_job_trigger( ) ), ) - response = client.get_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.status == dlp.JobTrigger.Status.HEALTHY @@ -4636,7 +4430,7 @@ def test_get_job_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4644,7 +4438,6 @@ def test_get_job_trigger_empty_call(): client.get_job_trigger() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() @@ -4653,7 +4446,7 @@ async def test_get_job_trigger_async( transport: str = "grpc_asyncio", request_type=dlp.GetJobTriggerRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4671,24 +4464,18 @@ async def test_get_job_trigger_async( status=dlp.JobTrigger.Status.HEALTHY, ) ) - response = await client.get_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.JobTrigger) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.status == dlp.JobTrigger.Status.HEALTHY @@ -4698,17 +4485,17 @@ async def test_get_job_trigger_async_from_dict(): def test_get_job_trigger_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: call.return_value = dlp.JobTrigger() - client.get_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -4723,17 +4510,17 @@ def test_get_job_trigger_field_headers(): @pytest.mark.asyncio async def test_get_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.get_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -4747,13 +4534,12 @@ async def test_get_job_trigger_field_headers_async(): def test_get_job_trigger_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_job_trigger(name="name_value",) @@ -4762,12 +4548,11 @@ def test_get_job_trigger_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_job_trigger_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4779,7 +4564,7 @@ def test_get_job_trigger_flattened_error(): @pytest.mark.asyncio async def test_get_job_trigger_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: @@ -4795,13 +4580,12 @@ async def test_get_job_trigger_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
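The field-header tests above all follow one template: populate the routing field on the request (request.name = "name/value"), invoke the RPC against a patched transport callable, and verify what reached the transport. A self-contained sketch of that template; the final metadata assertion is the usual generated check and is assumed here, since it falls outside the visible context lines:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.dlp_v2 import DlpServiceClient
    from google.cloud.dlp_v2.types import dlp

    def test_get_job_trigger_field_headers_sketch():
        client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
        request = dlp.GetJobTriggerRequest()
        request.name = "name/value"  # value that must be echoed as a field header

        # Patch the transport-level callable so no network traffic happens.
        with mock.patch.object(
            type(client.transport.get_job_trigger), "__call__"
        ) as call:
            call.return_value = dlp.JobTrigger()
            client.get_job_trigger(request)

            # The request object is forwarded untouched ...
            _, args, kw = call.mock_calls[0]
            assert args[0] == request
            # ... and the routing header mirrors request.name (assumed check).
            assert ("x-goog-request-params", "name=name/value") in kw["metadata"]
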
@@ -4815,7 +4599,7 @@ def test_list_job_triggers( transport: str = "grpc", request_type=dlp.ListJobTriggersRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4830,19 +4614,15 @@ def test_list_job_triggers( call.return_value = dlp.ListJobTriggersResponse( next_page_token="next_page_token_value", ) - response = client.list_job_triggers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == "next_page_token_value" @@ -4854,7 +4634,7 @@ def test_list_job_triggers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4864,7 +4644,6 @@ def test_list_job_triggers_empty_call(): client.list_job_triggers() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() @@ -4873,7 +4652,7 @@ async def test_list_job_triggers_async( transport: str = "grpc_asyncio", request_type=dlp.ListJobTriggersRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4888,18 +4667,15 @@ async def test_list_job_triggers_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListJobTriggersResponse(next_page_token="next_page_token_value",) ) - response = await client.list_job_triggers(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobTriggersAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -4909,11 +4685,12 @@ async def test_list_job_triggers_async_from_dict(): def test_list_job_triggers_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListJobTriggersRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4921,7 +4698,6 @@ def test_list_job_triggers_field_headers(): type(client.transport.list_job_triggers), "__call__" ) as call: call.return_value = dlp.ListJobTriggersResponse() - client.list_job_triggers(request) # Establish that the underlying gRPC stub method was called. 
@@ -4936,11 +4712,12 @@ def test_list_job_triggers_field_headers(): @pytest.mark.asyncio async def test_list_job_triggers_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListJobTriggersRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4950,7 +4727,6 @@ async def test_list_job_triggers_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListJobTriggersResponse() ) - await client.list_job_triggers(request) # Establish that the underlying gRPC stub method was called. @@ -4964,7 +4740,7 @@ async def test_list_job_triggers_field_headers_async(): def test_list_job_triggers_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4972,7 +4748,6 @@ def test_list_job_triggers_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListJobTriggersResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_job_triggers(parent="parent_value",) @@ -4981,12 +4756,11 @@ def test_list_job_triggers_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_job_triggers_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4998,7 +4772,7 @@ def test_list_job_triggers_flattened_error(): @pytest.mark.asyncio async def test_list_job_triggers_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5018,13 +4792,12 @@ async def test_list_job_triggers_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_job_triggers_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5035,7 +4808,7 @@ async def test_list_job_triggers_flattened_error_async(): def test_list_job_triggers_pager(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
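The flattened-call hunks above exercise the keyword-argument surface: a truthy value for each flattened field is packed into the request proto, and mixing a request object with flattened fields raises ValueError. A short sketch of both halves, mirroring the list_job_triggers hunks:

    from unittest import mock

    import pytest

    from google.auth import credentials as ga_credentials
    from google.cloud.dlp_v2 import DlpServiceClient
    from google.cloud.dlp_v2.types import dlp

    def test_list_job_triggers_flattened_sketch():
        client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
        with mock.patch.object(
            type(client.transport.list_job_triggers), "__call__"
        ) as call:
            call.return_value = dlp.ListJobTriggersResponse()
            # The keyword (flattened) argument is copied onto the request proto.
            client.list_job_triggers(parent="parent_value")
            _, args, _ = call.mock_calls[0]
            assert args[0].parent == "parent_value"

        # Passing a request object and flattened fields together is rejected.
        with pytest.raises(ValueError):
            client.list_job_triggers(
                dlp.ListJobTriggersRequest(), parent="parent_value"
            )
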
with mock.patch.object( @@ -5071,7 +4844,7 @@ def test_list_job_triggers_pager(): def test_list_job_triggers_pages(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5099,7 +4872,7 @@ def test_list_job_triggers_pages(): @pytest.mark.asyncio async def test_list_job_triggers_async_pager(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5134,7 +4907,7 @@ async def test_list_job_triggers_async_pager(): @pytest.mark.asyncio async def test_list_job_triggers_async_pages(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5168,7 +4941,7 @@ def test_delete_job_trigger( transport: str = "grpc", request_type=dlp.DeleteJobTriggerRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5181,13 +4954,11 @@ def test_delete_job_trigger( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() # Establish that the response is the type that we expect. @@ -5202,7 +4973,7 @@ def test_delete_job_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5212,7 +4983,6 @@ def test_delete_job_trigger_empty_call(): client.delete_job_trigger() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() @@ -5221,7 +4991,7 @@ async def test_delete_job_trigger_async( transport: str = "grpc_asyncio", request_type=dlp.DeleteJobTriggerRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5234,13 +5004,11 @@ async def test_delete_job_trigger_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() # Establish that the response is the type that we expect. 
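The pager tests touched in the hunks above queue several consecutive responses and then iterate the returned pager; note they also pass the AnonymousCredentials class rather than an instance, a quirk preserved from the generated source. The sketch below fakes two pages via side_effect; the page contents and counts are illustrative, since the real test builds its pages outside the visible context lines:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.dlp_v2 import DlpServiceClient
    from google.cloud.dlp_v2.types import dlp

    def test_list_job_triggers_pager_sketch():
        client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
        with mock.patch.object(
            type(client.transport.list_job_triggers), "__call__"
        ) as call:
            # Each side_effect element is one page; an empty token ends paging.
            call.side_effect = (
                dlp.ListJobTriggersResponse(
                    job_triggers=[dlp.JobTrigger(), dlp.JobTrigger()],
                    next_page_token="abc",
                ),
                dlp.ListJobTriggersResponse(
                    job_triggers=[dlp.JobTrigger()], next_page_token="",
                ),
            )
            pager = client.list_job_triggers(request={})
            results = list(pager)  # iterating the pager issues the follow-up call
            assert len(results) == 3
            assert all(isinstance(item, dlp.JobTrigger) for item in results)
            assert len(call.mock_calls) == 2
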
@@ -5253,11 +5021,12 @@ async def test_delete_job_trigger_async_from_dict(): def test_delete_job_trigger_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5265,7 +5034,6 @@ def test_delete_job_trigger_field_headers(): type(client.transport.delete_job_trigger), "__call__" ) as call: call.return_value = None - client.delete_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -5280,11 +5048,12 @@ def test_delete_job_trigger_field_headers(): @pytest.mark.asyncio async def test_delete_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5292,7 +5061,6 @@ async def test_delete_job_trigger_field_headers_async(): type(client.transport.delete_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -5306,7 +5074,7 @@ async def test_delete_job_trigger_field_headers_async(): def test_delete_job_trigger_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5314,7 +5082,6 @@ def test_delete_job_trigger_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_job_trigger(name="name_value",) @@ -5323,12 +5090,11 @@ def test_delete_job_trigger_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_job_trigger_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5340,7 +5106,7 @@ def test_delete_job_trigger_flattened_error(): @pytest.mark.asyncio async def test_delete_job_trigger_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5358,13 +5124,12 @@ async def test_delete_job_trigger_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5378,7 +5143,7 @@ def test_activate_job_trigger( transport: str = "grpc", request_type=dlp.ActivateJobTriggerRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5403,25 +5168,18 @@ def test_activate_job_trigger( ) ), ) - response = client.activate_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == "name_value" - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == "job_trigger_name_value" @@ -5433,7 +5191,7 @@ def test_activate_job_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5443,7 +5201,6 @@ def test_activate_job_trigger_empty_call(): client.activate_job_trigger() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() @@ -5452,7 +5209,7 @@ async def test_activate_job_trigger_async( transport: str = "grpc_asyncio", request_type=dlp.ActivateJobTriggerRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5472,24 +5229,18 @@ async def test_activate_job_trigger_async( job_trigger_name="job_trigger_name_value", ) ) - response = await client.activate_job_trigger(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DlpJob) - assert response.name == "name_value" - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == "job_trigger_name_value" @@ -5499,11 +5250,12 @@ async def test_activate_job_trigger_async_from_dict(): def test_activate_job_trigger_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = dlp.ActivateJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5511,7 +5263,6 @@ def test_activate_job_trigger_field_headers(): type(client.transport.activate_job_trigger), "__call__" ) as call: call.return_value = dlp.DlpJob() - client.activate_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -5526,11 +5277,12 @@ def test_activate_job_trigger_field_headers(): @pytest.mark.asyncio async def test_activate_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ActivateJobTriggerRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5538,7 +5290,6 @@ async def test_activate_job_trigger_field_headers_async(): type(client.transport.activate_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.activate_job_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -5553,7 +5304,7 @@ async def test_activate_job_trigger_field_headers_async(): def test_create_dlp_job(transport: str = "grpc", request_type=dlp.CreateDlpJobRequest): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5576,25 +5327,18 @@ def test_create_dlp_job(transport: str = "grpc", request_type=dlp.CreateDlpJobRe ) ), ) - response = client.create_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == "name_value" - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == "job_trigger_name_value" @@ -5606,7 +5350,7 @@ def test_create_dlp_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5614,7 +5358,6 @@ def test_create_dlp_job_empty_call(): client.create_dlp_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() @@ -5623,7 +5366,7 @@ async def test_create_dlp_job_async( transport: str = "grpc_asyncio", request_type=dlp.CreateDlpJobRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5641,24 +5384,18 @@ async def test_create_dlp_job_async( job_trigger_name="job_trigger_name_value", ) ) - response = await client.create_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DlpJob) - assert response.name == "name_value" - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == "job_trigger_name_value" @@ -5668,17 +5405,17 @@ async def test_create_dlp_job_async_from_dict(): def test_create_dlp_job_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateDlpJobRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: call.return_value = dlp.DlpJob() - client.create_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -5693,17 +5430,17 @@ def test_create_dlp_job_field_headers(): @pytest.mark.asyncio async def test_create_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateDlpJobRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.create_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -5717,13 +5454,12 @@ async def test_create_dlp_job_field_headers_async(): def test_create_dlp_job_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_dlp_job( @@ -5748,9 +5484,7 @@ def test_create_dlp_job_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].risk_job == dlp.RiskAnalysisJobConfig( privacy_metric=dlp.PrivacyMetric( numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( @@ -5761,7 +5495,7 @@ def test_create_dlp_job_flattened(): def test_create_dlp_job_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5788,7 +5522,7 @@ def test_create_dlp_job_flattened_error(): @pytest.mark.asyncio async def test_create_dlp_job_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: @@ -5820,9 +5554,7 @@ async def test_create_dlp_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].risk_job == dlp.RiskAnalysisJobConfig( privacy_metric=dlp.PrivacyMetric( numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( @@ -5834,7 +5566,7 @@ async def test_create_dlp_job_flattened_async(): @pytest.mark.asyncio async def test_create_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5861,7 +5593,7 @@ async def test_create_dlp_job_flattened_error_async(): def test_list_dlp_jobs(transport: str = "grpc", request_type=dlp.ListDlpJobsRequest): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5874,19 +5606,15 @@ def test_list_dlp_jobs(transport: str = "grpc", request_type=dlp.ListDlpJobsRequ call.return_value = dlp.ListDlpJobsResponse( next_page_token="next_page_token_value", ) - response = client.list_dlp_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == "next_page_token_value" @@ -5898,7 +5626,7 @@ def test_list_dlp_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
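When a flattened field is itself a message, as with risk_job in the create_dlp_job hunks above, the generated check rebuilds an equal message and compares it with what landed on the request. A sketch of that comparison, trimmed to the pieces visible in these hunks:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.dlp_v2 import DlpServiceClient
    from google.cloud.dlp_v2.types import dlp

    def test_create_dlp_job_flattened_sketch():
        client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials())
        risk_job = dlp.RiskAnalysisJobConfig(
            privacy_metric=dlp.PrivacyMetric(
                numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig()
            )
        )
        with mock.patch.object(
            type(client.transport.create_dlp_job), "__call__"
        ) as call:
            call.return_value = dlp.DlpJob()
            client.create_dlp_job(parent="parent_value", risk_job=risk_job)
            _, args, _ = call.mock_calls[0]
            assert args[0].parent == "parent_value"
            # Message-valued flattened fields compare by full proto equality.
            assert args[0].risk_job == risk_job
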
@@ -5906,7 +5634,6 @@ def test_list_dlp_jobs_empty_call(): client.list_dlp_jobs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() @@ -5915,7 +5642,7 @@ async def test_list_dlp_jobs_async( transport: str = "grpc_asyncio", request_type=dlp.ListDlpJobsRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5928,18 +5655,15 @@ async def test_list_dlp_jobs_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListDlpJobsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_dlp_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDlpJobsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -5949,17 +5673,17 @@ async def test_list_dlp_jobs_async_from_dict(): def test_list_dlp_jobs_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListDlpJobsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: call.return_value = dlp.ListDlpJobsResponse() - client.list_dlp_jobs(request) # Establish that the underlying gRPC stub method was called. @@ -5974,11 +5698,12 @@ def test_list_dlp_jobs_field_headers(): @pytest.mark.asyncio async def test_list_dlp_jobs_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListDlpJobsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5986,7 +5711,6 @@ async def test_list_dlp_jobs_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListDlpJobsResponse() ) - await client.list_dlp_jobs(request) # Establish that the underlying gRPC stub method was called. @@ -6000,13 +5724,12 @@ async def test_list_dlp_jobs_field_headers_async(): def test_list_dlp_jobs_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListDlpJobsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_dlp_jobs(parent="parent_value",) @@ -6015,12 +5738,11 @@ def test_list_dlp_jobs_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_dlp_jobs_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6032,7 +5754,7 @@ def test_list_dlp_jobs_flattened_error(): @pytest.mark.asyncio async def test_list_dlp_jobs_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: @@ -6050,13 +5772,12 @@ async def test_list_dlp_jobs_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_dlp_jobs_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6067,7 +5788,7 @@ async def test_list_dlp_jobs_flattened_error_async(): def test_list_dlp_jobs_pager(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: @@ -6096,7 +5817,7 @@ def test_list_dlp_jobs_pager(): def test_list_dlp_jobs_pages(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: @@ -6117,7 +5838,7 @@ def test_list_dlp_jobs_pages(): @pytest.mark.asyncio async def test_list_dlp_jobs_async_pager(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6145,7 +5866,7 @@ async def test_list_dlp_jobs_async_pager(): @pytest.mark.asyncio async def test_list_dlp_jobs_async_pages(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6170,7 +5891,7 @@ async def test_list_dlp_jobs_async_pages(): def test_get_dlp_job(transport: str = "grpc", request_type=dlp.GetDlpJobRequest): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6193,25 +5914,18 @@ def test_get_dlp_job(transport: str = "grpc", request_type=dlp.GetDlpJobRequest) ) ), ) - response = client.get_dlp_job(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == "name_value" - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == "job_trigger_name_value" @@ -6223,7 +5937,7 @@ def test_get_dlp_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6231,7 +5945,6 @@ def test_get_dlp_job_empty_call(): client.get_dlp_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() @@ -6240,7 +5953,7 @@ async def test_get_dlp_job_async( transport: str = "grpc_asyncio", request_type=dlp.GetDlpJobRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6258,24 +5971,18 @@ async def test_get_dlp_job_async( job_trigger_name="job_trigger_name_value", ) ) - response = await client.get_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DlpJob) - assert response.name == "name_value" - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == "job_trigger_name_value" @@ -6285,17 +5992,17 @@ async def test_get_dlp_job_async_from_dict(): def test_get_dlp_job_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: call.return_value = dlp.DlpJob() - client.get_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -6310,17 +6017,17 @@ def test_get_dlp_job_field_headers(): @pytest.mark.asyncio async def test_get_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.get_dlp_job(request) # Establish that the underlying gRPC stub method was called. 
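The *_empty_call tests in the hunks above are coverage failsafes: the method is called with no request object and no flattened fields, and the only expectation is that a default request of the right type reached the transport. In sketch form:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.dlp_v2 import DlpServiceClient
    from google.cloud.dlp_v2.types import dlp

    def test_get_dlp_job_empty_call_sketch():
        client = DlpServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
        )
        with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call:
            call.return_value = dlp.DlpJob()
            client.get_dlp_job()  # no request object, no flattened fields
            call.assert_called()
            _, args, _ = call.mock_calls[0]
            # An empty request of the expected type is synthesized for the caller.
            assert args[0] == dlp.GetDlpJobRequest()
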
@@ -6334,13 +6041,12 @@ async def test_get_dlp_job_field_headers_async(): def test_get_dlp_job_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.DlpJob() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_dlp_job(name="name_value",) @@ -6349,12 +6055,11 @@ def test_get_dlp_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_dlp_job_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6366,7 +6071,7 @@ def test_get_dlp_job_flattened_error(): @pytest.mark.asyncio async def test_get_dlp_job_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: @@ -6382,13 +6087,12 @@ async def test_get_dlp_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6400,7 +6104,7 @@ async def test_get_dlp_job_flattened_error_async(): def test_delete_dlp_job(transport: str = "grpc", request_type=dlp.DeleteDlpJobRequest): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6411,13 +6115,11 @@ def test_delete_dlp_job(transport: str = "grpc", request_type=dlp.DeleteDlpJobRe with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() # Establish that the response is the type that we expect. @@ -6432,7 +6134,7 @@ def test_delete_dlp_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6440,7 +6142,6 @@ def test_delete_dlp_job_empty_call(): client.delete_dlp_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() @@ -6449,7 +6150,7 @@ async def test_delete_dlp_job_async( transport: str = "grpc_asyncio", request_type=dlp.DeleteDlpJobRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6460,13 +6161,11 @@ async def test_delete_dlp_job_async( with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDlpJobRequest() # Establish that the response is the type that we expect. @@ -6479,17 +6178,17 @@ async def test_delete_dlp_job_async_from_dict(): def test_delete_dlp_job_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: call.return_value = None - client.delete_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -6504,17 +6203,17 @@ def test_delete_dlp_job_field_headers(): @pytest.mark.asyncio async def test_delete_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -6528,13 +6227,12 @@ async def test_delete_dlp_job_field_headers_async(): def test_delete_dlp_job_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_dlp_job(name="name_value",) @@ -6543,12 +6241,11 @@ def test_delete_dlp_job_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_dlp_job_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6560,7 +6257,7 @@ def test_delete_dlp_job_flattened_error(): @pytest.mark.asyncio async def test_delete_dlp_job_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: @@ -6576,13 +6273,12 @@ async def test_delete_dlp_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6594,7 +6290,7 @@ async def test_delete_dlp_job_flattened_error_async(): def test_cancel_dlp_job(transport: str = "grpc", request_type=dlp.CancelDlpJobRequest): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6605,13 +6301,11 @@ def test_cancel_dlp_job(transport: str = "grpc", request_type=dlp.CancelDlpJobRe with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.cancel_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() # Establish that the response is the type that we expect. @@ -6626,7 +6320,7 @@ def test_cancel_dlp_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6634,7 +6328,6 @@ def test_cancel_dlp_job_empty_call(): client.cancel_dlp_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() @@ -6643,7 +6336,7 @@ async def test_cancel_dlp_job_async( transport: str = "grpc_asyncio", request_type=dlp.CancelDlpJobRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6654,13 +6347,11 @@ async def test_cancel_dlp_job_async( with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CancelDlpJobRequest() # Establish that the response is the type that we expect. @@ -6673,17 +6364,17 @@ async def test_cancel_dlp_job_async_from_dict(): def test_cancel_dlp_job_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CancelDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: call.return_value = None - client.cancel_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -6698,17 +6389,17 @@ def test_cancel_dlp_job_field_headers(): @pytest.mark.asyncio async def test_cancel_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CancelDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -6725,7 +6416,7 @@ def test_create_stored_info_type( transport: str = "grpc", request_type=dlp.CreateStoredInfoTypeRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6738,19 +6429,15 @@ def test_create_stored_info_type( ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType(name="name_value",) - response = client.create_stored_info_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == "name_value" @@ -6762,7 +6449,7 @@ def test_create_stored_info_type_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
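On the async side the same template applies, except the patched callable must hand back something awaitable, which is what grpc_helpers_async.FakeUnaryUnaryCall supplies in the hunks above. A sketch for an empty-response RPC, assuming pytest-asyncio as used elsewhere in this module:

    from unittest import mock

    import pytest
    from google.api_core import grpc_helpers_async
    from google.auth import credentials as ga_credentials
    from google.cloud.dlp_v2 import DlpServiceAsyncClient
    from google.cloud.dlp_v2.types import dlp

    @pytest.mark.asyncio
    async def test_cancel_dlp_job_async_sketch():
        client = DlpServiceAsyncClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc_asyncio",
        )
        with mock.patch.object(
            type(client.transport.cancel_dlp_job), "__call__"
        ) as call:
            # Wrap the (empty) response so the async client can await it.
            call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
            response = await client.cancel_dlp_job(request=dlp.CancelDlpJobRequest())
            _, args, _ = call.mock_calls[0]
            assert args[0] == dlp.CancelDlpJobRequest()
            assert response is None
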
@@ -6772,7 +6459,6 @@ def test_create_stored_info_type_empty_call(): client.create_stored_info_type() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() @@ -6781,7 +6467,7 @@ async def test_create_stored_info_type_async( transport: str = "grpc_asyncio", request_type=dlp.CreateStoredInfoTypeRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6796,18 +6482,15 @@ async def test_create_stored_info_type_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.StoredInfoType(name="name_value",) ) - response = await client.create_stored_info_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateStoredInfoTypeRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.StoredInfoType) - assert response.name == "name_value" @@ -6817,11 +6500,12 @@ async def test_create_stored_info_type_async_from_dict(): def test_create_stored_info_type_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateStoredInfoTypeRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -6829,7 +6513,6 @@ def test_create_stored_info_type_field_headers(): type(client.transport.create_stored_info_type), "__call__" ) as call: call.return_value = dlp.StoredInfoType() - client.create_stored_info_type(request) # Establish that the underlying gRPC stub method was called. @@ -6844,11 +6527,12 @@ def test_create_stored_info_type_field_headers(): @pytest.mark.asyncio async def test_create_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.CreateStoredInfoTypeRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -6856,7 +6540,6 @@ async def test_create_stored_info_type_field_headers_async(): type(client.transport.create_stored_info_type), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.create_stored_info_type(request) # Establish that the underlying gRPC stub method was called. @@ -6870,7 +6553,7 @@ async def test_create_stored_info_type_field_headers_async(): def test_create_stored_info_type_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6878,7 +6561,6 @@ def test_create_stored_info_type_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_stored_info_type( @@ -6890,16 +6572,14 @@ def test_create_stored_info_type_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].config == dlp.StoredInfoTypeConfig( display_name="display_name_value" ) def test_create_stored_info_type_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6913,7 +6593,7 @@ def test_create_stored_info_type_flattened_error(): @pytest.mark.asyncio async def test_create_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6934,9 +6614,7 @@ async def test_create_stored_info_type_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].config == dlp.StoredInfoTypeConfig( display_name="display_name_value" ) @@ -6944,7 +6622,7 @@ async def test_create_stored_info_type_flattened_async(): @pytest.mark.asyncio async def test_create_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -6960,7 +6638,7 @@ def test_update_stored_info_type( transport: str = "grpc", request_type=dlp.UpdateStoredInfoTypeRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -6973,19 +6651,15 @@ def test_update_stored_info_type( ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType(name="name_value",) - response = client.update_stored_info_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == "name_value" @@ -6997,7 +6671,7 @@ def test_update_stored_info_type_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7007,7 +6681,6 @@ def test_update_stored_info_type_empty_call(): client.update_stored_info_type() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() @@ -7016,7 +6689,7 @@ async def test_update_stored_info_type_async( transport: str = "grpc_asyncio", request_type=dlp.UpdateStoredInfoTypeRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7031,18 +6704,15 @@ async def test_update_stored_info_type_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.StoredInfoType(name="name_value",) ) - response = await client.update_stored_info_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateStoredInfoTypeRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.StoredInfoType) - assert response.name == "name_value" @@ -7052,11 +6722,12 @@ async def test_update_stored_info_type_async_from_dict(): def test_update_stored_info_type_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.UpdateStoredInfoTypeRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -7064,7 +6735,6 @@ def test_update_stored_info_type_field_headers(): type(client.transport.update_stored_info_type), "__call__" ) as call: call.return_value = dlp.StoredInfoType() - client.update_stored_info_type(request) # Establish that the underlying gRPC stub method was called. @@ -7079,11 +6749,12 @@ def test_update_stored_info_type_field_headers(): @pytest.mark.asyncio async def test_update_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.UpdateStoredInfoTypeRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -7091,7 +6762,6 @@ async def test_update_stored_info_type_field_headers_async(): type(client.transport.update_stored_info_type), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.update_stored_info_type(request) # Establish that the underlying gRPC stub method was called. @@ -7105,7 +6775,7 @@ async def test_update_stored_info_type_field_headers_async(): def test_update_stored_info_type_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7113,31 +6783,27 @@ def test_update_stored_info_type_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_stored_info_type( name="name_value", config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].config == dlp.StoredInfoTypeConfig( display_name="display_name_value" ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_stored_info_type_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -7146,13 +6812,13 @@ def test_update_stored_info_type_flattened_error(): dlp.UpdateStoredInfoTypeRequest(), name="name_value", config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7167,26 +6833,23 @@ async def test_update_stored_info_type_flattened_async(): response = await client.update_stored_info_type( name="name_value", config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].config == dlp.StoredInfoTypeConfig( display_name="display_name_value" ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -7195,7 +6858,7 @@ async def test_update_stored_info_type_flattened_error_async(): dlp.UpdateStoredInfoTypeRequest(), name="name_value", config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -7203,7 +6866,7 @@ def test_get_stored_info_type( transport: str = "grpc", request_type=dlp.GetStoredInfoTypeRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7216,19 +6879,15 @@ def test_get_stored_info_type( ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType(name="name_value",) - response = client.get_stored_info_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.StoredInfoType) - assert response.name == "name_value" @@ -7240,7 +6899,7 @@ def test_get_stored_info_type_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7250,7 +6909,6 @@ def test_get_stored_info_type_empty_call(): client.get_stored_info_type() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() @@ -7259,7 +6917,7 @@ async def test_get_stored_info_type_async( transport: str = "grpc_asyncio", request_type=dlp.GetStoredInfoTypeRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7274,18 +6932,15 @@ async def test_get_stored_info_type_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.StoredInfoType(name="name_value",) ) - response = await client.get_stored_info_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetStoredInfoTypeRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.StoredInfoType) - assert response.name == "name_value" @@ -7295,11 +6950,12 @@ async def test_get_stored_info_type_async_from_dict(): def test_get_stored_info_type_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetStoredInfoTypeRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
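The update_stored_info_type hunks above now build the update mask from `field_mask_pb2` rather than the old `field_mask` alias. As a usage-level sketch of the flattened call those tests exercise (the resource name and mask path are placeholders, not values from the diff):

    from google.protobuf import field_mask_pb2
    from google.cloud import dlp_v2

    # Sketch only: update a stored infoType's display name, restricting the
    # server-side update to the `config` field via the field mask.
    client = dlp_v2.DlpServiceClient()
    client.update_stored_info_type(
        name="organizations/ORG_ID/storedInfoTypes/INFO_TYPE_ID",  # placeholder resource name
        config=dlp_v2.StoredInfoTypeConfig(display_name="display_name_value"),
        update_mask=field_mask_pb2.FieldMask(paths=["config"]),
    )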
@@ -7307,7 +6963,6 @@ def test_get_stored_info_type_field_headers(): type(client.transport.get_stored_info_type), "__call__" ) as call: call.return_value = dlp.StoredInfoType() - client.get_stored_info_type(request) # Establish that the underlying gRPC stub method was called. @@ -7322,11 +6977,12 @@ def test_get_stored_info_type_field_headers(): @pytest.mark.asyncio async def test_get_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.GetStoredInfoTypeRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -7334,7 +6990,6 @@ async def test_get_stored_info_type_field_headers_async(): type(client.transport.get_stored_info_type), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) - await client.get_stored_info_type(request) # Establish that the underlying gRPC stub method was called. @@ -7348,7 +7003,7 @@ async def test_get_stored_info_type_field_headers_async(): def test_get_stored_info_type_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7356,7 +7011,6 @@ def test_get_stored_info_type_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_stored_info_type(name="name_value",) @@ -7365,12 +7019,11 @@ def test_get_stored_info_type_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_stored_info_type_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -7382,7 +7035,7 @@ def test_get_stored_info_type_flattened_error(): @pytest.mark.asyncio async def test_get_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7400,13 +7053,12 @@ async def test_get_stored_info_type_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
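The `*_flattened` and `*_flattened_error` tests above exercise the two calling conventions the generated client supports: pass a fully formed request object, or pass individual fields as keyword arguments, but never both. A brief caller-side sketch of that contract (resource names are placeholders):

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    name = "organizations/ORG_ID/storedInfoTypes/INFO_TYPE_ID"  # placeholder

    # Request-object style.
    request = dlp_v2.GetStoredInfoTypeRequest(name=name)
    client.get_stored_info_type(request=request)

    # Flattened style (keyword arguments only).
    client.get_stored_info_type(name=name)

    # Mixing the two raises ValueError, which is what the *_flattened_error tests assert.
    try:
        client.get_stored_info_type(request=request, name=name)
    except ValueError:
        pass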
@@ -7420,7 +7072,7 @@ def test_list_stored_info_types( transport: str = "grpc", request_type=dlp.ListStoredInfoTypesRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7435,19 +7087,15 @@ def test_list_stored_info_types( call.return_value = dlp.ListStoredInfoTypesResponse( next_page_token="next_page_token_value", ) - response = client.list_stored_info_types(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListStoredInfoTypesPager) - assert response.next_page_token == "next_page_token_value" @@ -7459,7 +7107,7 @@ def test_list_stored_info_types_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7469,7 +7117,6 @@ def test_list_stored_info_types_empty_call(): client.list_stored_info_types() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() @@ -7478,7 +7125,7 @@ async def test_list_stored_info_types_async( transport: str = "grpc_asyncio", request_type=dlp.ListStoredInfoTypesRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7493,18 +7140,15 @@ async def test_list_stored_info_types_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListStoredInfoTypesResponse(next_page_token="next_page_token_value",) ) - response = await client.list_stored_info_types(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListStoredInfoTypesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -7514,11 +7158,12 @@ async def test_list_stored_info_types_async_from_dict(): def test_list_stored_info_types_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListStoredInfoTypesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -7526,7 +7171,6 @@ def test_list_stored_info_types_field_headers(): type(client.transport.list_stored_info_types), "__call__" ) as call: call.return_value = dlp.ListStoredInfoTypesResponse() - client.list_stored_info_types(request) # Establish that the underlying gRPC stub method was called. 
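Each `*_field_headers` test sets a resource field such as `request.parent = "parent/value"` before making the call. The assertions that follow the "Establish that the underlying gRPC stub method was called" comment fall outside the changed context of these hunks; in generated tests of this shape they typically verify that the field value was propagated as routing metadata, roughly as sketched below (shown only for orientation, not quoted from this diff):

    # Sketch of the elided tail of a field-headers test.
    assert len(call.mock_calls) == 1
    _, args, _ = call.mock_calls[0]
    assert args[0] == request

    # The resource field must be echoed back as an x-goog-request-params header.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]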
@@ -7541,11 +7185,12 @@ def test_list_stored_info_types_field_headers(): @pytest.mark.asyncio async def test_list_stored_info_types_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.ListStoredInfoTypesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -7555,7 +7200,6 @@ async def test_list_stored_info_types_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListStoredInfoTypesResponse() ) - await client.list_stored_info_types(request) # Establish that the underlying gRPC stub method was called. @@ -7569,7 +7213,7 @@ async def test_list_stored_info_types_field_headers_async(): def test_list_stored_info_types_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7577,7 +7221,6 @@ def test_list_stored_info_types_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListStoredInfoTypesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_stored_info_types(parent="parent_value",) @@ -7586,12 +7229,11 @@ def test_list_stored_info_types_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_stored_info_types_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -7603,7 +7245,7 @@ def test_list_stored_info_types_flattened_error(): @pytest.mark.asyncio async def test_list_stored_info_types_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7623,13 +7265,12 @@ async def test_list_stored_info_types_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_stored_info_types_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -7640,7 +7281,7 @@ async def test_list_stored_info_types_flattened_error_async(): def test_list_stored_info_types_pager(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -7682,7 +7323,7 @@ def test_list_stored_info_types_pager(): def test_list_stored_info_types_pages(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7716,7 +7357,7 @@ def test_list_stored_info_types_pages(): @pytest.mark.asyncio async def test_list_stored_info_types_async_pager(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7757,7 +7398,7 @@ async def test_list_stored_info_types_async_pager(): @pytest.mark.asyncio async def test_list_stored_info_types_async_pages(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7797,7 +7438,7 @@ def test_delete_stored_info_type( transport: str = "grpc", request_type=dlp.DeleteStoredInfoTypeRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7810,13 +7451,11 @@ def test_delete_stored_info_type( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_stored_info_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() # Establish that the response is the type that we expect. @@ -7831,7 +7470,7 @@ def test_delete_stored_info_type_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7841,7 +7480,6 @@ def test_delete_stored_info_type_empty_call(): client.delete_stored_info_type() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() @@ -7850,7 +7488,7 @@ async def test_delete_stored_info_type_async( transport: str = "grpc_asyncio", request_type=dlp.DeleteStoredInfoTypeRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7863,13 +7501,11 @@ async def test_delete_stored_info_type_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_stored_info_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteStoredInfoTypeRequest() # Establish that the response is the type that we expect. 
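The pager tests above (`test_list_stored_info_types_pager`/`_pages` and their async variants) cover the paginated surface of `list_stored_info_types`. From an application's point of view the returned pager is simply iterated; a short usage sketch (the parent value is a placeholder):

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    # Iterating the pager transparently fetches successive pages.
    for stored_info_type in client.list_stored_info_types(parent="organizations/ORG_ID"):
        print(stored_info_type.name)

    # Or walk page by page, mirroring what the *_pages tests assert.
    pager = client.list_stored_info_types(parent="organizations/ORG_ID")
    for page in pager.pages:
        for stored_info_type in page.stored_info_types:
            print(stored_info_type.name)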
@@ -7882,11 +7518,12 @@ async def test_delete_stored_info_type_async_from_dict(): def test_delete_stored_info_type_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteStoredInfoTypeRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -7894,7 +7531,6 @@ def test_delete_stored_info_type_field_headers(): type(client.transport.delete_stored_info_type), "__call__" ) as call: call.return_value = None - client.delete_stored_info_type(request) # Establish that the underlying gRPC stub method was called. @@ -7909,11 +7545,12 @@ def test_delete_stored_info_type_field_headers(): @pytest.mark.asyncio async def test_delete_stored_info_type_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.DeleteStoredInfoTypeRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -7921,7 +7558,6 @@ async def test_delete_stored_info_type_field_headers_async(): type(client.transport.delete_stored_info_type), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_stored_info_type(request) # Establish that the underlying gRPC stub method was called. @@ -7935,7 +7571,7 @@ async def test_delete_stored_info_type_field_headers_async(): def test_delete_stored_info_type_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7943,7 +7579,6 @@ def test_delete_stored_info_type_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_stored_info_type(name="name_value",) @@ -7952,12 +7587,11 @@ def test_delete_stored_info_type_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_stored_info_type_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -7969,7 +7603,7 @@ def test_delete_stored_info_type_flattened_error(): @pytest.mark.asyncio async def test_delete_stored_info_type_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7987,13 +7621,12 @@ async def test_delete_stored_info_type_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_stored_info_type_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -8007,7 +7640,7 @@ def test_hybrid_inspect_dlp_job( transport: str = "grpc", request_type=dlp.HybridInspectDlpJobRequest ): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8020,17 +7653,14 @@ def test_hybrid_inspect_dlp_job( ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() - response = client.hybrid_inspect_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) @@ -8042,7 +7672,7 @@ def test_hybrid_inspect_dlp_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8052,7 +7682,6 @@ def test_hybrid_inspect_dlp_job_empty_call(): client.hybrid_inspect_dlp_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() @@ -8061,7 +7690,7 @@ async def test_hybrid_inspect_dlp_job_async( transport: str = "grpc_asyncio", request_type=dlp.HybridInspectDlpJobRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8076,13 +7705,11 @@ async def test_hybrid_inspect_dlp_job_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.HybridInspectResponse() ) - response = await client.hybrid_inspect_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectDlpJobRequest() # Establish that the response is the type that we expect. @@ -8095,11 +7722,12 @@ async def test_hybrid_inspect_dlp_job_async_from_dict(): def test_hybrid_inspect_dlp_job_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.HybridInspectDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -8107,7 +7735,6 @@ def test_hybrid_inspect_dlp_job_field_headers(): type(client.transport.hybrid_inspect_dlp_job), "__call__" ) as call: call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -8122,11 +7749,12 @@ def test_hybrid_inspect_dlp_job_field_headers(): @pytest.mark.asyncio async def test_hybrid_inspect_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.HybridInspectDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -8136,7 +7764,6 @@ async def test_hybrid_inspect_dlp_job_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.HybridInspectResponse() ) - await client.hybrid_inspect_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -8150,7 +7777,7 @@ async def test_hybrid_inspect_dlp_job_field_headers_async(): def test_hybrid_inspect_dlp_job_flattened(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8158,7 +7785,6 @@ def test_hybrid_inspect_dlp_job_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.hybrid_inspect_dlp_job(name="name_value",) @@ -8167,12 +7793,11 @@ def test_hybrid_inspect_dlp_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_hybrid_inspect_dlp_job_flattened_error(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -8184,7 +7809,7 @@ def test_hybrid_inspect_dlp_job_flattened_error(): @pytest.mark.asyncio async def test_hybrid_inspect_dlp_job_flattened_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8204,13 +7829,12 @@ async def test_hybrid_inspect_dlp_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_hybrid_inspect_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -8222,7 +7846,7 @@ async def test_hybrid_inspect_dlp_job_flattened_error_async(): def test_finish_dlp_job(transport: str = "grpc", request_type=dlp.FinishDlpJobRequest): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8233,13 +7857,11 @@ def test_finish_dlp_job(transport: str = "grpc", request_type=dlp.FinishDlpJobRe with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.finish_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() # Establish that the response is the type that we expect. @@ -8254,7 +7876,7 @@ def test_finish_dlp_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8262,7 +7884,6 @@ def test_finish_dlp_job_empty_call(): client.finish_dlp_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() @@ -8271,7 +7892,7 @@ async def test_finish_dlp_job_async( transport: str = "grpc_asyncio", request_type=dlp.FinishDlpJobRequest ): client = DlpServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8282,13 +7903,11 @@ async def test_finish_dlp_job_async( with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.finish_dlp_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == dlp.FinishDlpJobRequest() # Establish that the response is the type that we expect. @@ -8301,17 +7920,17 @@ async def test_finish_dlp_job_async_from_dict(): def test_finish_dlp_job_field_headers(): - client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.FinishDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: call.return_value = None - client.finish_dlp_job(request) # Establish that the underlying gRPC stub method was called. 
@@ -8326,17 +7945,17 @@ def test_finish_dlp_job_field_headers(): @pytest.mark.asyncio async def test_finish_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = dlp.FinishDlpJobRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.finish_dlp_job(request) # Establish that the underlying gRPC stub method was called. @@ -8352,16 +7971,16 @@ async def test_finish_dlp_job_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DlpServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.DlpServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DlpServiceClient( @@ -8371,7 +7990,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.DlpServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = DlpServiceClient( @@ -8382,7 +8001,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.DlpServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = DlpServiceClient(transport=transport) assert client.transport is transport @@ -8391,13 +8010,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.DlpServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.DlpServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -8409,23 +8028,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DlpServiceClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.DlpServiceGrpcTransport,) def test_dlp_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.DlpServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -8437,7 +8056,7 @@ def test_dlp_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.DlpServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -8483,15 +8102,37 @@ def test_dlp_service_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_dlp_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_dlp_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DlpServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -8504,19 +8145,33 @@ def test_dlp_service_base_transport_with_credentials_file(): def test_dlp_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.DlpServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_dlp_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_dlp_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) DlpServiceClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -8524,26 +8179,156 @@ def test_dlp_service_auth_adc(): ) -def test_dlp_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport,], +) +@requires_google_auth_gte_1_25_0 +def test_dlp_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.DlpServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport,], +) +@requires_google_auth_lt_1_25_0 +def test_dlp_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DlpServiceGrpcTransport, grpc_helpers), + (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DlpServiceGrpcTransport, grpc_helpers), + (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_dlp_service_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DlpServiceGrpcTransport, grpc_helpers), + (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_dlp_service_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport], ) def test_dlp_service_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -8582,7 +8367,7 @@ def test_dlp_service_grpc_transport_client_cert_source_for_mtls(transport_class) def test_dlp_service_host_no_port(): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint="dlp.googleapis.com"), ) assert client.transport._host == "dlp.googleapis.com:443" @@ -8590,7 +8375,7 @@ def test_dlp_service_host_no_port(): def test_dlp_service_host_with_port(): client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dlp.googleapis.com:8000" ), @@ -8641,9 +8426,9 @@ def test_dlp_service_transport_channel_mtls_with_client_cert_source(transport_cl mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -8717,7 +8502,6 @@ def test_dlp_service_transport_channel_mtls_with_adc(transport_class): def test_deidentify_template_path(): organization = "squid" deidentify_template = "clam" - expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format( organization=organization, deidentify_template=deidentify_template, ) @@ -8741,7 +8525,6 @@ def test_parse_deidentify_template_path(): def test_dlp_content_path(): project = "oyster" - expected = "projects/{project}/dlpContent".format(project=project,) actual = DlpServiceClient.dlp_content_path(project) assert expected == actual @@ -8761,7 +8544,6 @@ def test_parse_dlp_content_path(): def test_dlp_job_path(): project = "cuttlefish" dlp_job = "mussel" - expected = "projects/{project}/dlpJobs/{dlp_job}".format( project=project, dlp_job=dlp_job, ) @@ -8785,7 +8567,6 @@ def test_finding_path(): project = "scallop" location = "abalone" finding = "squid" - expected = "projects/{project}/locations/{location}/findings/{finding}".format( project=project, location=location, finding=finding, ) @@ -8809,7 +8590,6 @@ def test_parse_finding_path(): def test_inspect_template_path(): organization = "oyster" inspect_template = "nudibranch" - expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format( organization=organization, inspect_template=inspect_template, ) @@ -8832,7 +8612,6 @@ def test_parse_inspect_template_path(): def test_job_trigger_path(): project = "winkle" job_trigger = "nautilus" - expected = "projects/{project}/jobTriggers/{job_trigger}".format( project=project, job_trigger=job_trigger, ) @@ -8855,7 +8634,6 @@ def test_parse_job_trigger_path(): def test_stored_info_type_path(): organization = "squid" stored_info_type = "clam" - expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format( organization=organization, stored_info_type=stored_info_type, ) @@ -8877,7 +8655,6 @@ def test_parse_stored_info_type_path(): def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -8898,7 +8675,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = 
"cuttlefish" - expected = "folders/{folder}".format(folder=folder,) actual = DlpServiceClient.common_folder_path(folder) assert expected == actual @@ -8917,7 +8693,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) actual = DlpServiceClient.common_organization_path(organization) assert expected == actual @@ -8936,7 +8711,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) actual = DlpServiceClient.common_project_path(project) assert expected == actual @@ -8956,7 +8730,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -8983,7 +8756,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.DlpServiceTransport, "_prep_wrapped_messages" ) as prep: client = DlpServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -8992,6 +8765,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = DlpServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info)